summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
Diffstat (limited to 'spec')
-rw-r--r--spec/controllers/admin/clusters/applications_controller_spec.rb139
-rw-r--r--spec/controllers/admin/integrations_controller_spec.rb46
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb33
-rw-r--r--spec/controllers/admin/services_controller_spec.rb75
-rw-r--r--spec/controllers/admin/users_controller_spec.rb14
-rw-r--r--spec/controllers/concerns/redis_tracking_spec.rb6
-rw-r--r--spec/controllers/concerns/spammable_actions/akismet_mark_as_spam_action_spec.rb71
-rw-r--r--spec/controllers/concerns/spammable_actions/captcha_check/html_format_actions_support_spec.rb74
-rw-r--r--spec/controllers/concerns/spammable_actions/captcha_check/json_format_actions_support_spec.rb60
-rw-r--r--spec/controllers/concerns/spammable_actions_spec.rb112
-rw-r--r--spec/controllers/dashboard/projects_controller_spec.rb2
-rw-r--r--spec/controllers/groups/clusters/applications_controller_spec.rb148
-rw-r--r--spec/controllers/groups/dependency_proxy_auth_controller_spec.rb57
-rw-r--r--spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb143
-rw-r--r--spec/controllers/groups/runners_controller_spec.rb27
-rw-r--r--spec/controllers/groups/settings/integrations_controller_spec.rb10
-rw-r--r--spec/controllers/import/available_namespaces_controller_spec.rb88
-rw-r--r--spec/controllers/import/manifest_controller_spec.rb1
-rw-r--r--spec/controllers/invites_controller_spec.rb69
-rw-r--r--spec/controllers/jira_connect/app_descriptor_controller_spec.rb81
-rw-r--r--spec/controllers/jira_connect/branches_controller_spec.rb47
-rw-r--r--spec/controllers/jira_connect/subscriptions_controller_spec.rb48
-rw-r--r--spec/controllers/profiles/notifications_controller_spec.rb2
-rw-r--r--spec/controllers/profiles/two_factor_auths_controller_spec.rb12
-rw-r--r--spec/controllers/profiles_controller_spec.rb11
-rw-r--r--spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb148
-rw-r--r--spec/controllers/projects/clusters/applications_controller_spec.rb215
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb49
-rw-r--r--spec/controllers/projects/error_tracking/stack_traces_controller_spec.rb2
-rw-r--r--spec/controllers/projects/error_tracking_controller_spec.rb6
-rw-r--r--spec/controllers/projects/feature_flags_controller_spec.rb8
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb2
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb158
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb179
-rw-r--r--spec/controllers/projects/packages/infrastructure_registry_controller_spec.rb8
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb14
-rw-r--r--spec/controllers/projects/raw_controller_spec.rb12
-rw-r--r--spec/controllers/projects/services_controller_spec.rb77
-rw-r--r--spec/controllers/projects/snippets_controller_spec.rb20
-rw-r--r--spec/controllers/projects_controller_spec.rb57
-rw-r--r--spec/controllers/registrations/welcome_controller_spec.rb22
-rw-r--r--spec/controllers/registrations_controller_spec.rb62
-rw-r--r--spec/controllers/search_controller_spec.rb16
-rw-r--r--spec/controllers/snippets_controller_spec.rb2
-rw-r--r--spec/crystalball_env.rb2
-rw-r--r--spec/db/schema_spec.rb3
-rw-r--r--spec/deprecation_toolkit_env.rb10
-rw-r--r--spec/experiments/application_experiment_spec.rb114
-rw-r--r--spec/experiments/force_company_trial_experiment_spec.rb24
-rw-r--r--spec/experiments/members/invite_email_experiment_spec.rb117
-rw-r--r--spec/experiments/new_project_readme_content_experiment_spec.rb4
-rw-r--r--spec/experiments/new_project_readme_experiment_spec.rb75
-rw-r--r--spec/factories/alert_management/alerts.rb6
-rw-r--r--spec/factories/ci/build_trace_section_names.rb8
-rw-r--r--spec/factories/ci/builds.rb22
-rw-r--r--spec/factories/ci/job_artifacts.rb70
-rw-r--r--spec/factories/ci/pending_builds.rb2
-rw-r--r--spec/factories/ci/reports/security/aggregated_reports.rb14
-rw-r--r--spec/factories/ci/reports/security/finding_keys.rb18
-rw-r--r--spec/factories/ci/reports/security/findings.rb54
-rw-r--r--spec/factories/ci/reports/security/locations/sast.rb23
-rw-r--r--spec/factories/ci/reports/security/reports.rb28
-rw-r--r--spec/factories/customer_relations/organizations.rb9
-rw-r--r--spec/factories/deploy_tokens.rb6
-rw-r--r--spec/factories/design_management/designs.rb2
-rw-r--r--spec/factories/environments.rb5
-rw-r--r--spec/factories/error_tracking/client_key.rb12
-rw-r--r--spec/factories/error_tracking/detailed_error.rb2
-rw-r--r--spec/factories/error_tracking/error.rb18
-rw-r--r--spec/factories/error_tracking/error_event.rb13
-rw-r--r--spec/factories/gitlab/database/async_indexes/postgres_async_index.rb9
-rw-r--r--spec/factories/incident_management/issuable_escalation_statuses.rb25
-rw-r--r--spec/factories/integrations.rb2
-rw-r--r--spec/factories/packages/debian/distribution.rb10
-rw-r--r--spec/factories/packages/debian/distribution_key.rb4
-rw-r--r--spec/factories/packages/debian/file_metadatum.rb22
-rw-r--r--spec/factories/project_error_tracking_settings.rb4
-rw-r--r--spec/factories/projects.rb10
-rw-r--r--spec/factories/projects/ci_feature_usages.rb9
-rw-r--r--spec/factories/sequences.rb1
-rw-r--r--spec/factories/usage_data.rb3
-rw-r--r--spec/factories/user_details.rb1
-rw-r--r--spec/factories/work_item/work_item_types.rb30
-rw-r--r--spec/factories_spec.rb1
-rw-r--r--spec/fast_spec_helper.rb8
-rw-r--r--spec/features/admin/admin_appearance_spec.rb2
-rw-r--r--spec/features/admin/admin_dev_ops_report_spec.rb4
-rw-r--r--spec/features/admin/admin_groups_spec.rb4
-rw-r--r--spec/features/admin/admin_manage_applications_spec.rb56
-rw-r--r--spec/features/admin/admin_mode/logout_spec.rb76
-rw-r--r--spec/features/admin/admin_mode_spec.rb217
-rw-r--r--spec/features/admin/admin_settings_spec.rb22
-rw-r--r--spec/features/admin/dashboard_spec.rb4
-rw-r--r--spec/features/admin/integrations/user_activates_mattermost_slash_command_spec.rb6
-rw-r--r--spec/features/admin/services/admin_visits_service_templates_spec.rb53
-rw-r--r--spec/features/admin/users/user_spec.rb37
-rw-r--r--spec/features/boards/boards_spec.rb791
-rw-r--r--spec/features/callouts/service_templates_deprecation_spec.rb59
-rw-r--r--spec/features/clusters/cluster_health_dashboard_spec.rb16
-rw-r--r--spec/features/cycle_analytics_spec.rb87
-rw-r--r--spec/features/dashboard/active_tab_spec.rb43
-rw-r--r--spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb72
-rw-r--r--spec/features/dashboard/projects_spec.rb23
-rw-r--r--spec/features/dashboard/shortcuts_spec.rb98
-rw-r--r--spec/features/frequently_visited_projects_and_groups_spec.rb72
-rw-r--r--spec/features/groups/board_sidebar_spec.rb28
-rw-r--r--spec/features/groups/board_spec.rb73
-rw-r--r--spec/features/groups/integrations/user_activates_mattermost_slash_command_spec.rb5
-rw-r--r--spec/features/groups/members/manage_members_spec.rb54
-rw-r--r--spec/features/groups/packages_spec.rb2
-rw-r--r--spec/features/groups/settings/manage_applications_spec.rb16
-rw-r--r--spec/features/groups/settings/packages_and_registries_spec.rb2
-rw-r--r--spec/features/groups/settings/user_searches_in_settings_spec.rb2
-rw-r--r--spec/features/groups_spec.rb2
-rw-r--r--spec/features/invites_spec.rb28
-rw-r--r--spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb2
-rw-r--r--spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb2
-rw-r--r--spec/features/issues/form_spec.rb36
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb4
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb6
-rw-r--r--spec/features/jira_connect/branches_spec.rb86
-rw-r--r--spec/features/labels_hierarchy_spec.rb76
-rw-r--r--spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb8
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb20
-rw-r--r--spec/features/merge_request/user_sees_closing_issues_message_spec.rb16
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb32
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb2
-rw-r--r--spec/features/merge_request/user_views_diffs_spec.rb2
-rw-r--r--spec/features/merge_requests/user_lists_merge_requests_spec.rb32
-rw-r--r--spec/features/nav/top_nav_responsive_spec.rb2
-rw-r--r--spec/features/profile_spec.rb2
-rw-r--r--spec/features/profiles/user_manages_applications_spec.rb49
-rw-r--r--spec/features/project_variables_spec.rb2
-rw-r--r--spec/features/projects/activity/user_sees_design_activity_spec.rb18
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb969
-rw-r--r--spec/features/projects/ci/editor_spec.rb40
-rw-r--r--spec/features/projects/commit/mini_pipeline_graph_spec.rb3
-rw-r--r--spec/features/projects/environments/environment_metrics_spec.rb12
-rw-r--r--spec/features/projects/environments/environment_spec.rb47
-rw-r--r--spec/features/projects/environments/environments_spec.rb4
-rw-r--r--spec/features/projects/environments_pod_logs_spec.rb2
-rw-r--r--spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb2
-rw-r--r--spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb2
-rw-r--r--spec/features/projects/fork_spec.rb4
-rw-r--r--spec/features/projects/import_export/export_file_spec.rb3
-rw-r--r--spec/features/projects/import_export/import_file_spec.rb2
-rw-r--r--spec/features/projects/infrastructure_registry_spec.rb6
-rw-r--r--spec/features/projects/integrations/user_uses_inherited_settings_spec.rb91
-rw-r--r--spec/features/projects/members/invite_group_spec.rb4
-rw-r--r--spec/features/projects/new_project_spec.rb514
-rw-r--r--spec/features/projects/packages_spec.rb2
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb201
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb22
-rw-r--r--spec/features/projects/services/user_activates_irker_spec.rb6
-rw-r--r--spec/features/projects/services/user_activates_pushover_spec.rb2
-rw-r--r--spec/features/projects/services/user_views_services_spec.rb2
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb6
-rw-r--r--spec/features/projects/show/schema_markup_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_collaboration_links_spec.rb4
-rw-r--r--spec/features/projects/tags/user_edits_tags_spec.rb2
-rw-r--r--spec/features/projects/terraform_spec.rb23
-rw-r--r--spec/features/projects/tree/create_directory_spec.rb4
-rw-r--r--spec/features/projects/tree/create_file_spec.rb4
-rw-r--r--spec/features/projects/tree/tree_show_spec.rb6
-rw-r--r--spec/features/projects/user_creates_project_spec.rb9
-rw-r--r--spec/features/projects_spec.rb16
-rw-r--r--spec/features/registrations/welcome_spec.rb21
-rw-r--r--spec/features/runners_spec.rb10
-rw-r--r--spec/features/search/user_uses_header_search_field_spec.rb1
-rw-r--r--spec/features/users/show_spec.rb58
-rw-r--r--spec/finders/ci/pipelines_finder_spec.rb23
-rw-r--r--spec/finders/ci/runners_finder_spec.rb158
-rw-r--r--spec/finders/error_tracking/errors_finder_spec.rb28
-rw-r--r--spec/finders/group_members_finder_spec.rb6
-rw-r--r--spec/finders/groups_finder_spec.rb31
-rw-r--r--spec/finders/issues_finder_spec.rb162
-rw-r--r--spec/finders/lfs_pointers_finder_spec.rb44
-rw-r--r--spec/finders/merge_requests_finder_spec.rb12
-rw-r--r--spec/finders/packages/pypi/packages_finder_spec.rb10
-rw-r--r--spec/finders/projects/members/effective_access_level_per_user_finder_spec.rb38
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_details.json42
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_nuget_dependency_link_metadata.json12
-rw-r--r--spec/fixtures/api/schemas/pipeline_schedule.json1
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/environments.json9
-rw-r--r--spec/fixtures/emails/no_content_with_quote.eml23
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml3
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml3
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml3
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_database_metric.rb17
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_generic_metric.rb (renamed from spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_metric.rb)5
-rw-r--r--spec/fixtures/packages/debian/distribution/InRelease8
-rw-r--r--spec/fixtures/private_key.asc17
-rw-r--r--spec/fixtures/public_key.asc15
-rw-r--r--spec/fixtures/security_reports/deprecated/gl-sast-report.json964
-rw-r--r--spec/fixtures/security_reports/feature-branch/gl-sast-report.json177
-rw-r--r--spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json5
-rw-r--r--spec/fixtures/security_reports/master/gl-common-scanning-report-names.json168
-rw-r--r--spec/fixtures/security_reports/master/gl-common-scanning-report.json160
-rw-r--r--spec/fixtures/security_reports/master/gl-sast-missing-scanner.json802
-rw-r--r--spec/fixtures/trace/sample_trace2
-rw-r--r--spec/frontend/__helpers__/mock_dom_observer.js22
-rw-r--r--spec/frontend/__helpers__/mock_window_location_helper.js2
-rw-r--r--spec/frontend/__helpers__/set_window_location_helper.js75
-rw-r--r--spec/frontend/__helpers__/set_window_location_helper_spec.js161
-rw-r--r--spec/frontend/admin/analytics/devops_score/components/devops_score_callout_spec.js67
-rw-r--r--spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js27
-rw-r--r--spec/frontend/admin/analytics/devops_score/mock_data.js2
-rw-r--r--spec/frontend/admin/signup_restrictions/components/signup_form_spec.js40
-rw-r--r--spec/frontend/admin/signup_restrictions/mock_data.js2
-rw-r--r--spec/frontend/admin/users/components/actions/actions_spec.js49
-rw-r--r--spec/frontend/admin/users/components/user_date_spec.js16
-rw-r--r--spec/frontend/admin/users/mock_data.js2
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js2
-rw-r--r--spec/frontend/analytics/devops_report/components/service_ping_disabled_spec.js12
-rw-r--r--spec/frontend/authentication/two_factor_auth/index_spec.js4
-rw-r--r--spec/frontend/authentication/webauthn/error_spec.js16
-rw-r--r--spec/frontend/authentication/webauthn/register_spec.js8
-rw-r--r--spec/frontend/blob/components/blob_header_default_actions_spec.js12
-rw-r--r--spec/frontend/blob/components/blob_header_spec.js12
-rw-r--r--spec/frontend/blob/csv/csv_viewer_spec.js13
-rw-r--r--spec/frontend/blob/viewer/index_spec.js2
-rw-r--r--spec/frontend/boards/board_card_inner_spec.js24
-rw-r--r--spec/frontend/boards/board_list_helper.js6
-rw-r--r--spec/frontend/boards/board_list_spec.js116
-rw-r--r--spec/frontend/boards/components/board_card_spec.js1
-rw-r--r--spec/frontend/boards/components/board_filtered_search_spec.js6
-rw-r--r--spec/frontend/boards/components/board_form_spec.js9
-rw-r--r--spec/frontend/boards/components/board_new_issue_spec.js154
-rw-r--r--spec/frontend/boards/components/board_new_item_spec.js103
-rw-r--r--spec/frontend/boards/components/issue_board_filtered_search_spec.js31
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js2
-rw-r--r--spec/frontend/boards/mock_data.js47
-rw-r--r--spec/frontend/boards/stores/actions_spec.js274
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js40
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js23
-rw-r--r--spec/frontend/clusters/clusters_bundle_spec.js17
-rw-r--r--spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js50
-rw-r--r--spec/frontend/commit/mock_data.js117
-rw-r--r--spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap2
-rw-r--r--spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap17
-rw-r--r--spec/frontend/content_editor/components/content_editor_error_spec.js54
-rw-r--r--spec/frontend/content_editor/components/content_editor_spec.js166
-rw-r--r--spec/frontend/content_editor/components/editor_state_observer_spec.js75
-rw-r--r--spec/frontend/content_editor/components/formatting_bubble_menu_spec.js80
-rw-r--r--spec/frontend/content_editor/components/toolbar_button_spec.js46
-rw-r--r--spec/frontend/content_editor/components/toolbar_image_button_spec.js22
-rw-r--r--spec/frontend/content_editor/components/toolbar_link_button_spec.js52
-rw-r--r--spec/frontend/content_editor/components/toolbar_table_button_spec.js34
-rw-r--r--spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js18
-rw-r--r--spec/frontend/content_editor/components/top_toolbar_spec.js35
-rw-r--r--spec/frontend/content_editor/extensions/attachment_spec.js235
-rw-r--r--spec/frontend/content_editor/extensions/code_block_highlight_spec.js3
-rw-r--r--spec/frontend/content_editor/extensions/emoji_spec.js57
-rw-r--r--spec/frontend/content_editor/extensions/hard_break_spec.js2
-rw-r--r--spec/frontend/content_editor/extensions/image_spec.js193
-rw-r--r--spec/frontend/content_editor/extensions/inline_diff_spec.js27
-rw-r--r--spec/frontend/content_editor/markdown_processing_spec.js2
-rw-r--r--spec/frontend/content_editor/services/build_serializer_config_spec.js38
-rw-r--r--spec/frontend/content_editor/services/content_editor_spec.js68
-rw-r--r--spec/frontend/content_editor/services/create_content_editor_spec.js12
-rw-r--r--spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js8
-rw-r--r--spec/frontend/content_editor/services/upload_helpers_spec.js (renamed from spec/frontend/content_editor/services/upload_file_spec.js)4
-rw-r--r--spec/frontend/content_editor/test_utils.js7
-rw-r--r--spec/frontend/cycle_analytics/__snapshots__/base_spec.js.snap9
-rw-r--r--spec/frontend/cycle_analytics/__snapshots__/total_time_component_spec.js.snap28
-rw-r--r--spec/frontend/cycle_analytics/base_spec.js138
-rw-r--r--spec/frontend/cycle_analytics/mock_data.js154
-rw-r--r--spec/frontend/cycle_analytics/stage_nav_item_spec.js152
-rw-r--r--spec/frontend/cycle_analytics/stage_table_spec.js279
-rw-r--r--spec/frontend/cycle_analytics/store/actions_spec.js173
-rw-r--r--spec/frontend/cycle_analytics/store/getters_spec.js3
-rw-r--r--spec/frontend/cycle_analytics/store/mutations_spec.js104
-rw-r--r--spec/frontend/cycle_analytics/total_time_component_spec.js61
-rw-r--r--spec/frontend/cycle_analytics/utils_spec.js80
-rw-r--r--spec/frontend/cycle_analytics/value_stream_metrics_spec.js128
-rw-r--r--spec/frontend/design_management/components/design_notes/design_discussion_spec.js38
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap96
-rw-r--r--spec/frontend/design_management/components/upload/design_version_dropdown_spec.js11
-rw-r--r--spec/frontend/design_management/components/upload/mock_data/all_versions.js16
-rw-r--r--spec/frontend/design_management/mock_data/all_versions.js14
-rw-r--r--spec/frontend/design_management/mock_data/apollo_mock.js37
-rw-r--r--spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap2
-rw-r--r--spec/frontend/diffs/components/app_spec.js52
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js25
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js50
-rw-r--r--spec/frontend/diffs/components/settings_dropdown_spec.js8
-rw-r--r--spec/frontend/diffs/store/actions_spec.js9
-rw-r--r--spec/frontend/diffs/store/getters_versions_dropdowns_spec.js10
-rw-r--r--spec/frontend/diffs/utils/queue_events_spec.js36
-rw-r--r--spec/frontend/editor/source_editor_extension_base_spec.js8
-rw-r--r--spec/frontend/editor/source_editor_markdown_ext_spec.js385
-rw-r--r--spec/frontend/editor/utils_spec.js85
-rw-r--r--spec/frontend/environment.js23
-rw-r--r--spec/frontend/environments/confirm_rollback_modal_spec.js138
-rw-r--r--spec/frontend/environments/edit_environment_spec.js104
-rw-r--r--spec/frontend/environments/environment_form_spec.js105
-rw-r--r--spec/frontend/environments/environment_item_spec.js125
-rw-r--r--spec/frontend/environments/environments_app_spec.js50
-rw-r--r--spec/frontend/environments/environments_detail_header_spec.js238
-rw-r--r--spec/frontend/environments/mock_data.js22
-rw-r--r--spec/frontend/environments/new_environment_spec.js100
-rw-r--r--spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js5
-rw-r--r--spec/frontend/feature_flags/mock_data.js2
-rw-r--r--spec/frontend/fixtures/analytics.rb71
-rw-r--r--spec/frontend/fixtures/api_markdown.rb9
-rw-r--r--spec/frontend/fixtures/api_markdown.yml31
-rw-r--r--spec/frontend/fixtures/startup_css.rb24
-rw-r--r--spec/frontend/graphql_shared/utils_spec.js15
-rw-r--r--spec/frontend/groups/components/group_item_spec.js5
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js10
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js120
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js (renamed from spec/frontend/import_entities/import_groups/components/import_table_row_spec.js)155
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js88
-rw-r--r--spec/frontend/integrations/edit/components/dynamic_field_spec.js13
-rw-r--r--spec/frontend/integrations/edit/components/integration_form_spec.js6
-rw-r--r--spec/frontend/integrations/overrides/components/integration_overrides_spec.js146
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js198
-rw-r--r--spec/frontend/invite_members/components/members_token_select_spec.js54
-rw-r--r--spec/frontend/issue_show/components/app_spec.js3
-rw-r--r--spec/frontend/issue_show/components/fields/type_spec.js14
-rw-r--r--spec/frontend/issue_show/issue_spec.js3
-rw-r--r--spec/frontend/issues_list/components/issuables_list_app_spec.js31
-rw-r--r--spec/frontend/issues_list/components/issue_card_time_info_spec.js21
-rw-r--r--spec/frontend/issues_list/components/issues_list_app_spec.js66
-rw-r--r--spec/frontend/issues_list/components/jira_issues_import_status_app_spec.js6
-rw-r--r--spec/frontend/issues_list/mock_data.js11
-rw-r--r--spec/frontend/issues_list/utils_spec.js21
-rw-r--r--spec/frontend/jira_connect/branches/components/new_branch_form_spec.js236
-rw-r--r--spec/frontend/jira_connect/branches/pages/index_spec.js65
-rw-r--r--spec/frontend/jira_connect/subscriptions/api_spec.js (renamed from spec/frontend/jira_connect/api_spec.js)6
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/__snapshots__/group_item_name_spec.js.snap (renamed from spec/frontend/jira_connect/components/__snapshots__/group_item_name_spec.js.snap)0
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/app_spec.js (renamed from spec/frontend/jira_connect/components/app_spec.js)8
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/group_item_name_spec.js (renamed from spec/frontend/jira_connect/components/group_item_name_spec.js)2
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/groups_list_item_spec.js (renamed from spec/frontend/jira_connect/components/groups_list_item_spec.js)10
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/groups_list_spec.js (renamed from spec/frontend/jira_connect/components/groups_list_spec.js)10
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/subscriptions_list_spec.js (renamed from spec/frontend/jira_connect/components/subscriptions_list_spec.js)12
-rw-r--r--spec/frontend/jira_connect/subscriptions/index_spec.js (renamed from spec/frontend/jira_connect/index_spec.js)4
-rw-r--r--spec/frontend/jira_connect/subscriptions/mock_data.js (renamed from spec/frontend/jira_connect/mock_data.js)0
-rw-r--r--spec/frontend/jira_connect/subscriptions/store/mutations_spec.js (renamed from spec/frontend/jira_connect/store/mutations_spec.js)4
-rw-r--r--spec/frontend/jira_connect/subscriptions/utils_spec.js (renamed from spec/frontend/jira_connect/utils_spec.js)4
-rw-r--r--spec/frontend/jobs/components/log/mock_data.js9
-rw-r--r--spec/frontend/jobs/components/stages_dropdown_spec.js13
-rw-r--r--spec/frontend/jobs/store/utils_spec.js16
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js115
-rw-r--r--spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js2
-rw-r--r--spec/frontend/members/components/action_buttons/remove_member_button_spec.js27
-rw-r--r--spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js42
-rw-r--r--spec/frontend/members/components/filter_sort/sort_dropdown_spec.js30
-rw-r--r--spec/frontend/members/components/members_tabs_spec.js33
-rw-r--r--spec/frontend/members/components/modals/remove_member_modal_spec.js (renamed from spec/frontend/vue_shared/components/remove_member_modal_spec.js)87
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js12
-rw-r--r--spec/frontend/members/mock_data.js9
-rw-r--r--spec/frontend/members/store/actions_spec.js32
-rw-r--r--spec/frontend/members/store/mutations_spec.js30
-rw-r--r--spec/frontend/members/utils_spec.js17
-rw-r--r--spec/frontend/monitoring/components/dashboard_actions_menu_spec.js3
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js40
-rw-r--r--spec/frontend/monitoring/utils_spec.js2
-rw-r--r--spec/frontend/nav/components/responsive_app_spec.js50
-rw-r--r--spec/frontend/notes/components/comment_field_layout_spec.js14
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js27
-rw-r--r--spec/frontend/packages/details/components/app_spec.js10
-rw-r--r--spec/frontend/packages/list/components/packages_list_app_spec.js15
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/conan_installation_spec.js.snap36
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/dependency_row_spec.js.snap36
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap30
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/maven_installation_spec.js.snap135
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap36
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/nuget_installation_spec.js.snap36
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap197
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap48
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/version_row_spec.js.snap101
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js130
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/app_spec.js448
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/composer_installation_spec.js118
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/conan_installation_spec.js65
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/dependency_row_spec.js69
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/file_sha_spec.js33
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/installation_title_spec.js58
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/installations_commands_spec.js64
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/maven_installation_spec.js213
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/npm_installation_spec.js122
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/nuget_installation_spec.js75
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js272
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js122
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js202
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js80
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/version_row_spec.js89
-rw-r--r--spec/frontend/packages_and_registries/package_registry/mock_data.js251
-rw-r--r--spec/frontend/packages_and_registries/package_registry/utils_spec.js23
-rw-r--r--spec/frontend/pages/admin/application_settings/metrics_and_profiling/usage_statistics_spec.js58
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js68
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_form_spec.js7
-rw-r--r--spec/frontend/persistent_user_callout_spec.js6
-rw-r--r--spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js8
-rw-r--r--spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js9
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_app_spec.js8
-rw-r--r--spec/frontend/pipelines/components/pipelines_filtered_search_spec.js11
-rw-r--r--spec/frontend/pipelines/graph/graph_component_legacy_spec.js300
-rw-r--r--spec/frontend/pipelines/graph/graph_component_spec.js4
-rw-r--r--spec/frontend/pipelines/graph/graph_component_wrapper_spec.js183
-rw-r--r--spec/frontend/pipelines/graph/linked_pipelines_column_legacy_spec.js40
-rw-r--r--spec/frontend/pipelines/graph/mock_data_legacy.js261
-rw-r--r--spec/frontend/pipelines/graph/stage_column_component_legacy_spec.js130
-rw-r--r--spec/frontend/pipelines/graph_shared/links_inner_spec.js2
-rw-r--r--spec/frontend/pipelines/graph_shared/links_layer_spec.js145
-rw-r--r--spec/frontend/pipelines/header_component_spec.js18
-rw-r--r--spec/frontend/pipelines/mock_data.js22
-rw-r--r--spec/frontend/pipelines/parsing_utils_spec.js6
-rw-r--r--spec/frontend/pipelines/pipeline_details_mediator_spec.js36
-rw-r--r--spec/frontend/pipelines/pipeline_multi_actions_spec.js8
-rw-r--r--spec/frontend/pipelines/pipeline_store_spec.js27
-rw-r--r--spec/frontend/pipelines/pipeline_url_spec.js1
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js27
-rw-r--r--spec/frontend/pipelines/pipelines_table_spec.js4
-rw-r--r--spec/frontend/pipelines/stores/pipeline_store_spec.js135
-rw-r--r--spec/frontend/pipelines/tokens/pipeline_source_token_spec.js50
-rw-r--r--spec/frontend/popovers/components/popovers_spec.js9
-rw-r--r--spec/frontend/profile/preferences/components/profile_preferences_spec.js17
-rw-r--r--spec/frontend/projects/compare/components/app_legacy_spec.js159
-rw-r--r--spec/frontend/projects/terraform_notification/terraform_notification_spec.js27
-rw-r--r--spec/frontend/registry/explorer/components/details_page/delete_modal_spec.js53
-rw-r--r--spec/frontend/registry/explorer/components/details_page/details_header_spec.js28
-rw-r--r--spec/frontend/registry/explorer/components/list_page/cleanup_status_spec.js4
-rw-r--r--spec/frontend/registry/explorer/mock_data.js1
-rw-r--r--spec/frontend/registry/explorer/pages/details_spec.js4
-rw-r--r--spec/frontend/registry/explorer/stubs.js5
-rw-r--r--spec/frontend/releases/components/app_edit_new_spec.js7
-rw-r--r--spec/frontend/releases/components/release_block_header_spec.js8
-rw-r--r--spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js28
-rw-r--r--spec/frontend/reports/codequality_report/store/actions_spec.js80
-rw-r--r--spec/frontend/reports/codequality_report/store/getters_spec.js7
-rw-r--r--spec/frontend/reports/codequality_report/store/mutations_spec.js17
-rw-r--r--spec/frontend/repository/components/blob_button_group_spec.js5
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js124
-rw-r--r--spec/frontend/repository/components/blob_edit_spec.js22
-rw-r--r--spec/frontend/repository/components/breadcrumbs_spec.js29
-rw-r--r--spec/frontend/repository/components/delete_blob_modal_spec.js119
-rw-r--r--spec/frontend/runner/admin_runners/admin_runners_app_spec.js (renamed from spec/frontend/runner/runner_list/runner_list_app_spec.js)29
-rw-r--r--spec/frontend/runner/components/runner_registration_token_reset_spec.js73
-rw-r--r--spec/frontend/runner/components/runner_type_alert_spec.js8
-rw-r--r--spec/frontend/runner/group_runners/group_runners_app_spec.js34
-rw-r--r--spec/frontend/runner/runner_search_utils_spec.js (renamed from spec/frontend/runner/runner_list/runner_search_utils_spec.js)2
-rw-r--r--spec/frontend/search/index_spec.js22
-rw-r--r--spec/frontend/search/mock_data.js15
-rw-r--r--spec/frontend/search/store/actions_spec.js79
-rw-r--r--spec/frontend/search/store/utils_spec.js86
-rw-r--r--spec/frontend/search/topbar/components/app_spec.js19
-rw-r--r--spec/frontend/search/topbar/components/group_filter_spec.js34
-rw-r--r--spec/frontend/search/topbar/components/project_filter_spec.js34
-rw-r--r--spec/frontend/security_configuration/app_spec.js27
-rw-r--r--spec/frontend/security_configuration/components/app_spec.js (renamed from spec/frontend/security_configuration/components/redesigned_app_spec.js)94
-rw-r--r--spec/frontend/security_configuration/components/auto_dev_ops_enabled_alert_spec.js46
-rw-r--r--spec/frontend/security_configuration/components/feature_card_spec.js34
-rw-r--r--spec/frontend/security_configuration/components/upgrade_banner_spec.js4
-rw-r--r--spec/frontend/security_configuration/configuration_table_spec.js52
-rw-r--r--spec/frontend/security_configuration/upgrade_spec.js30
-rw-r--r--spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap19
-rw-r--r--spec/frontend/self_monitor/components/self_monitor_form_spec.js4
-rw-r--r--spec/frontend/self_monitor/store/actions_spec.js4
-rw-r--r--spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js27
-rw-r--r--spec/frontend/sidebar/components/severity/sidebar_severity_spec.js57
-rw-r--r--spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js14
-rw-r--r--spec/frontend/sidebar/components/time_tracking/mock_data.js10
-rw-r--r--spec/frontend/sidebar/components/time_tracking/report_spec.js2
-rw-r--r--spec/frontend/sidebar/mock_data.js55
-rw-r--r--spec/frontend/snippets/components/show_spec.js4
-rw-r--r--spec/frontend/snippets/components/snippet_header_spec.js162
-rw-r--r--spec/frontend/syntax_highlight_spec.js63
-rw-r--r--spec/frontend/terraform/components/empty_state_spec.js19
-rw-r--r--spec/frontend/terraform/components/init_command_modal_spec.js79
-rw-r--r--spec/frontend/terraform/components/states_table_actions_spec.js28
-rw-r--r--spec/frontend/test_setup.js11
-rw-r--r--spec/frontend/token_access/token_access_spec.js17
-rw-r--r--spec/frontend/tooltips/components/tooltips_spec.js9
-rw-r--r--spec/frontend/tracking_spec.js41
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js101
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js21
-rw-r--r--spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap136
-rw-r--r--spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_ready_to_merge_spec.js.snap3
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js42
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js23
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js41
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js3
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_list_spec.js6
-rw-r--r--spec/frontend/vue_mr_widget/mock_data.js8
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js19
-rw-r--r--spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js97
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js28
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js34
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js34
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js23
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js42
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js17
-rw-r--r--spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/papa_parse_alert_spec.js44
-rw-r--r--spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js15
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js434
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js52
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js25
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/actions_spec.js88
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/mutations_spec.js52
-rw-r--r--spec/frontend/vue_shared/components/url_sync_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/web_ide_link_spec.js8
-rw-r--r--spec/frontend/vue_shared/directives/autofocusonshow_spec.js3
-rw-r--r--spec/frontend/vue_shared/security_reports/security_reports_app_spec.js2
-rw-r--r--spec/frontend_integration/diffs/diffs_interopability_spec.js5
-rw-r--r--spec/frontend_integration/ide/helpers/start.js5
-rw-r--r--spec/graphql/features/authorization_spec.rb6
-rw-r--r--spec/graphql/gitlab_schema_spec.rb89
-rw-r--r--spec/graphql/mutations/base_mutation_spec.rb56
-rw-r--r--spec/graphql/mutations/ci/runner/delete_spec.rb2
-rw-r--r--spec/graphql/mutations/ci/runner/update_spec.rb2
-rw-r--r--spec/graphql/mutations/design_management/delete_spec.rb39
-rw-r--r--spec/graphql/mutations/groups/update_spec.rb74
-rw-r--r--spec/graphql/mutations/issues/update_spec.rb22
-rw-r--r--spec/graphql/resolvers/base_resolver_spec.rb38
-rw-r--r--spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/concerns/resolves_ids_spec.rb43
-rw-r--r--spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb2
-rw-r--r--spec/graphql/resolvers/echo_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/error_tracking/sentry_detailed_error_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/groups_resolver_spec.rb133
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb72
-rw-r--r--spec/graphql/resolvers/merge_requests_count_resolver_spec.rb45
-rw-r--r--spec/graphql/resolvers/merge_requests_resolver_spec.rb23
-rw-r--r--spec/graphql/resolvers/namespace_projects_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/paginated_tree_resolver_spec.rb102
-rw-r--r--spec/graphql/resolvers/project_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/terraform/states_resolver_spec.rb3
-rw-r--r--spec/graphql/resolvers/timelog_resolver_spec.rb309
-rw-r--r--spec/graphql/resolvers/user_discussions_count_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/user_notes_count_resolver_spec.rb2
-rw-r--r--spec/graphql/subscriptions/issuable_updated_spec.rb2
-rw-r--r--spec/graphql/types/base_argument_spec.rb38
-rw-r--r--spec/graphql/types/base_field_spec.rb32
-rw-r--r--spec/graphql/types/global_id_type_spec.rb4
-rw-r--r--spec/graphql/types/group_type_spec.rb11
-rw-r--r--spec/graphql/types/issue_type_spec.rb2
-rw-r--r--spec/graphql/types/merge_requests/reviewer_type_spec.rb2
-rw-r--r--spec/graphql/types/namespace_type_spec.rb2
-rw-r--r--spec/graphql/types/packages/nuget/dependency_link_metdatum_type_spec.rb13
-rw-r--r--spec/graphql/types/packages/package_dependency_link_type_spec.rb13
-rw-r--r--spec/graphql/types/packages/package_dependency_type_enum_spec.rb9
-rw-r--r--spec/graphql/types/packages/package_dependency_type_spec.rb13
-rw-r--r--spec/graphql/types/packages/package_details_type_spec.rb2
-rw-r--r--spec/graphql/types/permission_types/base_permission_type_spec.rb2
-rw-r--r--spec/graphql/types/project_type_spec.rb11
-rw-r--r--spec/graphql/types/query_type_spec.rb11
-rw-r--r--spec/graphql/types/range_input_type_spec.rb6
-rw-r--r--spec/graphql/types/repository_type_spec.rb2
-rw-r--r--spec/graphql/types/timelog_type_spec.rb2
-rw-r--r--spec/graphql/types/user_type_spec.rb11
-rw-r--r--spec/helpers/admin/user_actions_helper_spec.rb2
-rw-r--r--spec/helpers/application_helper_spec.rb19
-rw-r--r--spec/helpers/application_settings_helper_spec.rb32
-rw-r--r--spec/helpers/button_helper_spec.rb6
-rw-r--r--spec/helpers/ci/pipeline_editor_helper_spec.rb8
-rw-r--r--spec/helpers/clusters_helper_spec.rb4
-rw-r--r--spec/helpers/environment_helper_spec.rb37
-rw-r--r--spec/helpers/environments_helper_spec.rb9
-rw-r--r--spec/helpers/groups/group_members_helper_spec.rb30
-rw-r--r--spec/helpers/groups_helper_spec.rb91
-rw-r--r--spec/helpers/invite_members_helper_spec.rb112
-rw-r--r--spec/helpers/issuables_description_templates_helper_spec.rb27
-rw-r--r--spec/helpers/issues_helper_spec.rb76
-rw-r--r--spec/helpers/nav/new_dropdown_helper_spec.rb6
-rw-r--r--spec/helpers/nav_helper_spec.rb10
-rw-r--r--spec/helpers/packages_helper_spec.rb21
-rw-r--r--spec/helpers/projects/project_members_helper_spec.rb137
-rw-r--r--spec/helpers/projects/terraform_helper_spec.rb12
-rw-r--r--spec/helpers/projects_helper_spec.rb62
-rw-r--r--spec/helpers/snippets_helper_spec.rb19
-rw-r--r--spec/helpers/time_zone_helper_spec.rb20
-rw-r--r--spec/helpers/user_callouts_helper_spec.rb28
-rw-r--r--spec/initializers/00_rails_disable_joins_spec.rb288
-rw-r--r--spec/initializers/0_log_deprecations_spec.rb74
-rw-r--r--spec/initializers/database_config_spec.rb57
-rw-r--r--spec/initializers/lograge_spec.rb3
-rw-r--r--spec/initializers/rails_asset_host_spec.rb38
-rw-r--r--spec/javascripts/monitoring/components/dashboard_resize_browser_spec.js95
-rw-r--r--spec/javascripts/monitoring/fixture_data.js1
-rw-r--r--spec/javascripts/monitoring/mock_data.js5
-rw-r--r--spec/javascripts/monitoring/store_utils.js1
-rw-r--r--spec/lib/api/ci/helpers/runner_helpers_spec.rb (renamed from spec/lib/api/helpers/runner_helpers_spec.rb)4
-rw-r--r--spec/lib/api/ci/helpers/runner_spec.rb (renamed from spec/lib/api/helpers/runner_spec.rb)4
-rw-r--r--spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb62
-rw-r--r--spec/lib/api/helpers_spec.rb36
-rw-r--r--spec/lib/backup/database_backup_error_spec.rb30
-rw-r--r--spec/lib/backup/file_backup_error_spec.rb35
-rw-r--r--spec/lib/backup/gitaly_backup_spec.rb10
-rw-r--r--spec/lib/backup/manager_spec.rb81
-rw-r--r--spec/lib/backup/repository_backup_error_spec.rb42
-rw-r--r--spec/lib/banzai/filter/references/alert_reference_filter_spec.rb29
-rw-r--r--spec/lib/banzai/filter/references/commit_reference_filter_spec.rb30
-rw-r--r--spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb5
-rw-r--r--spec/lib/banzai/filter/references/project_reference_filter_spec.rb30
-rw-r--r--spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb38
-rw-r--r--spec/lib/banzai/pipeline/full_pipeline_spec.rb46
-rw-r--r--spec/lib/banzai/pipeline/wiki_pipeline_spec.rb19
-rw-r--r--spec/lib/error_tracking/collector/sentry_auth_parser_spec.rb32
-rw-r--r--spec/lib/extracts_path_spec.rb16
-rw-r--r--spec/lib/feature/gitaly_spec.rb2
-rw-r--r--spec/lib/feature_spec.rb4
-rw-r--r--spec/lib/generators/gitlab/usage_metric_generator_spec.rb38
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb50
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb12
-rw-r--r--spec/lib/gitlab/auth/result_spec.rb79
-rw-r--r--spec/lib/gitlab/auth_spec.rb196
-rw-r--r--spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb9
-rw-r--r--spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb56
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans_spec.rb51
-rw-r--r--spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb11
-rw-r--r--spec/lib/gitlab/background_migration_spec.rb66
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importer_spec.rb3
-rw-r--r--spec/lib/gitlab/checks/changes_access_spec.rb201
-rw-r--r--spec/lib/gitlab/checks/matching_merge_request_spec.rb3
-rw-r--r--spec/lib/gitlab/checks/single_change_access_spec.rb47
-rw-r--r--spec/lib/gitlab/ci/ansi2html_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/build/auto_retry_spec.rb20
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb90
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules_spec.rb98
-rw-r--r--spec/lib/gitlab/ci/config/entry/include_spec.rb40
-rw-r--r--spec/lib/gitlab/ci/config/entry/inherit/variables_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/config/entry/jobs_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb71
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules_spec.rb72
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb50
-rw-r--r--spec/lib/gitlab/ci/config/external/rules_spec.rb35
-rw-r--r--spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/config/normalizer_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/config_spec.rb88
-rw-r--r--spec/lib/gitlab/ci/lint_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/parsers/security/common_spec.rb350
-rw-r--r--spec/lib/gitlab/ci/parsers/security/sast_spec.rb57
-rw-r--r--spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb54
-rw-r--r--spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb40
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/command_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb17
-rw-r--r--spec/lib/gitlab/ci/reports/security/aggregated_report_spec.rb45
-rw-r--r--spec/lib/gitlab/ci/reports/security/finding_key_spec.rb41
-rw-r--r--spec/lib/gitlab/ci/reports/security/finding_signature_spec.rb59
-rw-r--r--spec/lib/gitlab/ci/reports/security/locations/sast_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/reports/security/locations/secret_detection_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/reports/security/report_spec.rb224
-rw-r--r--spec/lib/gitlab/ci/reports/security/reports_spec.rb113
-rw-r--r--spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb163
-rw-r--r--spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/templates/Terraform/base_latest_gitlab_ci_yaml_spec.rb26
-rw-r--r--spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/npm_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/terraform_gitlab_ci_yaml_spec.rb46
-rw-r--r--spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb116
-rw-r--r--spec/lib/gitlab/config_checker/external_database_checker_spec.rb6
-rw-r--r--spec/lib/gitlab/conflict/file_spec.rb35
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb52
-rw-r--r--spec/lib/gitlab/data_builder/deployment_spec.rb1
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb85
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_creator_spec.rb50
-rw-r--r--spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb176
-rw-r--r--spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb17
-rw-r--r--spec/lib/gitlab/database/async_indexes_spec.rb23
-rw-r--r--spec/lib/gitlab/database/connection_spec.rb467
-rw-r--r--spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb30
-rw-r--r--spec/lib/gitlab/database/load_balancing/host_list_spec.rb84
-rw-r--r--spec/lib/gitlab/database/load_balancing/host_spec.rb15
-rw-r--r--spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb247
-rw-r--r--spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb9
-rw-r--r--spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb67
-rw-r--r--spec/lib/gitlab/database/load_balancing/sticking_spec.rb16
-rw-r--r--spec/lib/gitlab/database/load_balancing_spec.rb150
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb108
-rw-r--r--spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb97
-rw-r--r--spec/lib/gitlab/database/migrations/instrumentation_spec.rb47
-rw-r--r--spec/lib/gitlab/database/migrations/observers/query_details_spec.rb11
-rw-r--r--spec/lib/gitlab/database/migrations/observers/query_log_spec.rb12
-rw-r--r--spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb8
-rw-r--r--spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb8
-rw-r--r--spec/lib/gitlab/database/multi_threaded_migration_spec.rb43
-rw-r--r--spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb181
-rw-r--r--spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb20
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_manager_spec.rb143
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb12
-rw-r--r--spec/lib/gitlab/database/postgres_foreign_key_spec.rb41
-rw-r--r--spec/lib/gitlab/database/postgres_index_spec.rb31
-rw-r--r--spec/lib/gitlab/database/reindexing_spec.rb29
-rw-r--r--spec/lib/gitlab/database/schema_migrations/context_spec.rb61
-rw-r--r--spec/lib/gitlab/database/transaction/context_spec.rb144
-rw-r--r--spec/lib/gitlab/database/transaction/observer_spec.rb57
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb10
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb11
-rw-r--r--spec/lib/gitlab/database_spec.rb416
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb54
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb45
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb47
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb40
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb45
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb20
-rw-r--r--spec/lib/gitlab/email/reply_parser_spec.rb16
-rw-r--r--spec/lib/gitlab/encoding_helper_spec.rb18
-rw-r--r--spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb2
-rw-r--r--spec/lib/gitlab/etag_caching/router/restful_spec.rb8
-rw-r--r--spec/lib/gitlab/experimentation/controller_concern_spec.rb16
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb16
-rw-r--r--spec/lib/gitlab/fake_application_settings_spec.rb16
-rw-r--r--spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb88
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb12
-rw-r--r--spec/lib/gitlab/git/commit_stats_spec.rb30
-rw-r--r--spec/lib/gitlab/git/conflict/file_spec.rb16
-rw-r--r--spec/lib/gitlab/git/remote_mirror_spec.rb25
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb202
-rw-r--r--spec/lib/gitlab/git/tag_spec.rb83
-rw-r--r--spec/lib/gitlab/git/tree_spec.rb86
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb19
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb113
-rw-r--r--spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb25
-rw-r--r--spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb25
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_service_spec.rb11
-rw-r--r--spec/lib/gitlab/gitaly_client/remote_service_spec.rb62
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb153
-rw-r--r--spec/lib/gitlab/github_import/bulk_importing_spec.rb76
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb8
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb26
-rw-r--r--spec/lib/gitlab/github_import/importer/note_importer_spec.rb8
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb28
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb120
-rw-r--r--spec/lib/gitlab/github_import/importer/repository_importer_spec.rb59
-rw-r--r--spec/lib/gitlab/github_import/logger_spec.rb41
-rw-r--r--spec/lib/gitlab/github_import/object_counter_spec.rb18
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb146
-rw-r--r--spec/lib/gitlab/github_import/user_finder_spec.rb29
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb23
-rw-r--r--spec/lib/gitlab/graphql/calls_gitaly/field_extension_spec.rb2
-rw-r--r--spec/lib/gitlab/graphql/copy_field_description_spec.rb2
-rw-r--r--spec/lib/gitlab/graphql/markdown_field_spec.rb2
-rw-r--r--spec/lib/gitlab/graphql/mount_mutation_spec.rb4
-rw-r--r--spec/lib/gitlab/graphql/negatable_arguments_spec.rb6
-rw-r--r--spec/lib/gitlab/graphql/pagination/connections_spec.rb2
-rw-r--r--spec/lib/gitlab/graphql/present/field_extension_spec.rb16
-rw-r--r--spec/lib/gitlab/graphql/queries_spec.rb24
-rw-r--r--spec/lib/gitlab/highlight_spec.rb7
-rw-r--r--spec/lib/gitlab/http_spec.rb62
-rw-r--r--spec/lib/gitlab/import/database_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/import/import_failure_service_spec.rb136
-rw-r--r--spec/lib/gitlab/import/logger_spec.rb39
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml6
-rw-r--r--spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/group/tree_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/import_test_coverage_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb35
-rw-r--r--spec/lib/gitlab/import_export/members_mapper_spec.rb5
-rw-r--r--spec/lib/gitlab/import_export/project/tree_saver_spec.rb2
-rw-r--r--spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb31
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb8
-rw-r--r--spec/lib/gitlab/jira_import/issue_serializer_spec.rb13
-rw-r--r--spec/lib/gitlab/json_cache_spec.rb6
-rw-r--r--spec/lib/gitlab/kas_spec.rb32
-rw-r--r--spec/lib/gitlab/kubernetes/default_namespace_spec.rb30
-rw-r--r--spec/lib/gitlab/kubernetes/kubeconfig/entry/cluster_spec.rb23
-rw-r--r--spec/lib/gitlab/kubernetes/kubeconfig/entry/context_spec.rb23
-rw-r--r--spec/lib/gitlab/kubernetes/kubeconfig/entry/user_spec.rb14
-rw-r--r--spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb84
-rw-r--r--spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb62
-rw-r--r--spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb4
-rw-r--r--spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/subscribers/active_record_spec.rb7
-rw-r--r--spec/lib/gitlab/middleware/multipart_spec.rb3
-rw-r--r--spec/lib/gitlab/object_hierarchy_spec.rb4
-rw-r--r--spec/lib/gitlab/pagination/keyset/order_spec.rb87
-rw-r--r--spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb2
-rw-r--r--spec/lib/gitlab/repository_set_cache_spec.rb6
-rw-r--r--spec/lib/gitlab/search_results_spec.rb4
-rw-r--r--spec/lib/gitlab/setup_helper/workhorse_spec.rb24
-rw-r--r--spec/lib/gitlab/sidekiq_cluster/cli_spec.rb23
-rw-r--r--spec/lib/gitlab/sidekiq_config/worker_router_spec.rb7
-rw-r--r--spec/lib/gitlab/sidekiq_config/worker_spec.rb8
-rw-r--r--spec/lib/gitlab/sidekiq_config_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb57
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb1
-rw-r--r--spec/lib/gitlab/sql/cte_spec.rb8
-rw-r--r--spec/lib/gitlab/sql/glob_spec.rb4
-rw-r--r--spec/lib/gitlab/sql/recursive_cte_spec.rb12
-rw-r--r--spec/lib/gitlab/subscription_portal_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/docs/helper_spec.rb79
-rw-r--r--spec/lib/gitlab/usage/docs/renderer_spec.rb24
-rw-r--r--spec/lib/gitlab/usage/docs/value_formatter_spec.rb26
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb12
-rw-r--r--spec/lib/gitlab/usage/metric_spec.rb51
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb68
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb72
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/redis_metric_spec.rb23
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb4
-rw-r--r--spec/lib/gitlab/usage_data_counters/redis_counter_spec.rb10
-rw-r--r--spec/lib/gitlab/usage_data_metrics_spec.rb44
-rw-r--r--spec/lib/gitlab/usage_data_non_sql_metrics_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb25
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb16
-rw-r--r--spec/lib/gitlab/web_ide/config/entry/global_spec.rb1
-rw-r--r--spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb1
-rw-r--r--spec/lib/gitlab/x509/tag_spec.rb20
-rw-r--r--spec/lib/peek/views/active_record_spec.rb57
-rw-r--r--spec/lib/sidebars/groups/menus/ci_cd_menu_spec.rb40
-rw-r--r--spec/lib/sidebars/groups/menus/group_information_menu_spec.rb64
-rw-r--r--spec/lib/sidebars/groups/menus/issues_menu_spec.rb54
-rw-r--r--spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb32
-rw-r--r--spec/lib/sidebars/groups/menus/merge_requests_menu_spec.rb36
-rw-r--r--spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb163
-rw-r--r--spec/lib/sidebars/groups/menus/settings_menu_spec.rb97
-rw-r--r--spec/lib/sidebars/menu_spec.rb8
-rw-r--r--spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb22
-rw-r--r--spec/mailers/emails/pipelines_spec.rb41
-rw-r--r--spec/mailers/emails/projects_spec.rb21
-rw-r--r--spec/mailers/notify_spec.rb41
-rw-r--r--spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb31
-rw-r--r--spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb66
-rw-r--r--spec/migrations/20210804150320_create_base_work_item_types_spec.rb22
-rw-r--r--spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb137
-rw-r--r--spec/migrations/add_triggers_to_integrations_type_new_spec.rb65
-rw-r--r--spec/migrations/associate_existing_dast_builds_with_variables_spec.rb76
-rw-r--r--spec/migrations/backfill_integrations_type_new_spec.rb38
-rw-r--r--spec/migrations/backfill_issues_upvotes_count_spec.rb2
-rw-r--r--spec/migrations/confirm_security_bot_spec.rb38
-rw-r--r--spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb42
-rw-r--r--spec/migrations/orphaned_invite_tokens_cleanup_spec.rb32
-rw-r--r--spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb (renamed from spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb)2
-rw-r--r--spec/migrations/reschedule_delete_orphaned_deployments_spec.rb (renamed from spec/migrations/schedule_delete_orphaned_deployments_spec.rb)27
-rw-r--r--spec/migrations/reset_job_token_scope_enabled_again_spec.rb25
-rw-r--r--spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb59
-rw-r--r--spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb52
-rw-r--r--spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb127
-rw-r--r--spec/migrations/schedule_security_setting_creation_spec.rb58
-rw-r--r--spec/models/alert_management/alert_spec.rb285
-rw-r--r--spec/models/analytics/cycle_analytics/stage_event_hash_spec.rb47
-rw-r--r--spec/models/application_record_spec.rb77
-rw-r--r--spec/models/application_setting_spec.rb29
-rw-r--r--spec/models/ci/build_spec.rb209
-rw-r--r--spec/models/ci/build_trace_metadata_spec.rb10
-rw-r--r--spec/models/ci/build_trace_section_name_spec.rb14
-rw-r--r--spec/models/ci/build_trace_section_spec.rb13
-rw-r--r--spec/models/ci/build_trace_spec.rb10
-rw-r--r--spec/models/ci/pending_build_spec.rb28
-rw-r--r--spec/models/ci/pipeline_spec.rb50
-rw-r--r--spec/models/ci/resource_spec.rb52
-rw-r--r--spec/models/ci/runner_namespace_spec.rb6
-rw-r--r--spec/models/ci/runner_project_spec.rb6
-rw-r--r--spec/models/commit_status_spec.rb9
-rw-r--r--spec/models/concerns/case_sensitivity_spec.rb2
-rw-r--r--spec/models/concerns/ci/has_status_spec.rb12
-rw-r--r--spec/models/concerns/each_batch_spec.rb13
-rw-r--r--spec/models/concerns/has_integrations_spec.rb8
-rw-r--r--spec/models/concerns/limitable_spec.rb58
-rw-r--r--spec/models/concerns/sortable_spec.rb8
-rw-r--r--spec/models/concerns/spammable_spec.rb4
-rw-r--r--spec/models/concerns/strip_attribute_spec.rb6
-rw-r--r--spec/models/concerns/vulnerability_finding_signature_helpers_spec.rb32
-rw-r--r--spec/models/concerns/where_composite_spec.rb4
-rw-r--r--spec/models/container_repository_spec.rb2
-rw-r--r--spec/models/customer_relations/organization_spec.rb38
-rw-r--r--spec/models/deploy_token_spec.rb70
-rw-r--r--spec/models/diff_discussion_spec.rb15
-rw-r--r--spec/models/discussion_spec.rb8
-rw-r--r--spec/models/environment_spec.rb28
-rw-r--r--spec/models/error_tracking/client_key_spec.rb19
-rw-r--r--spec/models/error_tracking/error_event_spec.rb31
-rw-r--r--spec/models/error_tracking/error_spec.rb14
-rw-r--r--spec/models/error_tracking/project_error_tracking_setting_spec.rb43
-rw-r--r--spec/models/event_spec.rb6
-rw-r--r--spec/models/experiment_spec.rb4
-rw-r--r--spec/models/gpg_signature_spec.rb6
-rw-r--r--spec/models/group_deploy_token_spec.rb41
-rw-r--r--spec/models/group_spec.rb21
-rw-r--r--spec/models/hooks/system_hook_spec.rb15
-rw-r--r--spec/models/hooks/web_hook_spec.rb59
-rw-r--r--spec/models/incident_management/issuable_escalation_status_spec.rb22
-rw-r--r--spec/models/instance_configuration_spec.rb89
-rw-r--r--spec/models/integration_spec.rb413
-rw-r--r--spec/models/integrations/bamboo_spec.rb15
-rw-r--r--spec/models/integrations/datadog_spec.rb49
-rw-r--r--spec/models/integrations/jenkins_spec.rb16
-rw-r--r--spec/models/integrations/jira_spec.rb62
-rw-r--r--spec/models/integrations/teamcity_spec.rb14
-rw-r--r--spec/models/internal_id_spec.rb6
-rw-r--r--spec/models/issue_spec.rb111
-rw-r--r--spec/models/jira_connect_installation_spec.rb3
-rw-r--r--spec/models/member_spec.rb101
-rw-r--r--spec/models/members/group_member_spec.rb12
-rw-r--r--spec/models/members/project_member_spec.rb167
-rw-r--r--spec/models/merge_request_diff_commit_spec.rb4
-rw-r--r--spec/models/merge_request_diff_spec.rb9
-rw-r--r--spec/models/merge_request_spec.rb38
-rw-r--r--spec/models/milestone_spec.rb9
-rw-r--r--spec/models/namespace_setting_spec.rb85
-rw-r--r--spec/models/namespace_spec.rb79
-rw-r--r--spec/models/note_spec.rb20
-rw-r--r--spec/models/operations/feature_flags/strategy_spec.rb22
-rw-r--r--spec/models/packages/npm_spec.rb24
-rw-r--r--spec/models/packages/package_file_spec.rb67
-rw-r--r--spec/models/packages/package_spec.rb161
-rw-r--r--spec/models/personal_access_token_spec.rb8
-rw-r--r--spec/models/postgresql/detached_partition_spec.rb18
-rw-r--r--spec/models/postgresql/replication_slot_spec.rb67
-rw-r--r--spec/models/project_feature_spec.rb58
-rw-r--r--spec/models/project_feature_usage_spec.rb8
-rw-r--r--spec/models/project_spec.rb324
-rw-r--r--spec/models/project_statistics_spec.rb6
-rw-r--r--spec/models/project_team_spec.rb32
-rw-r--r--spec/models/projects/ci_feature_usage_spec.rb44
-rw-r--r--spec/models/release_highlight_spec.rb10
-rw-r--r--spec/models/remote_mirror_spec.rb108
-rw-r--r--spec/models/repository_spec.rb180
-rw-r--r--spec/models/snippet_spec.rb4
-rw-r--r--spec/models/timelog_spec.rb24
-rw-r--r--spec/models/tree_spec.rb8
-rw-r--r--spec/models/user_detail_spec.rb5
-rw-r--r--spec/models/user_spec.rb114
-rw-r--r--spec/models/users/banned_user_spec.rb21
-rw-r--r--spec/models/users/in_product_marketing_email_spec.rb6
-rw-r--r--spec/models/work_item/type_spec.rb53
-rw-r--r--spec/policies/issue_policy_spec.rb15
-rw-r--r--spec/policies/project_policy_spec.rb118
-rw-r--r--spec/policies/release_policy_spec.rb23
-rw-r--r--spec/presenters/group_clusterable_presenter_spec.rb16
-rw-r--r--spec/presenters/project_clusterable_presenter_spec.rb16
-rw-r--r--spec/presenters/sentry_error_presenter_spec.rb2
-rw-r--r--spec/requests/api/bulk_imports_spec.rb42
-rw-r--r--spec/requests/api/ci/jobs_spec.rb (renamed from spec/requests/api/jobs_spec.rb)20
-rw-r--r--spec/requests/api/ci/pipelines_spec.rb104
-rw-r--r--spec/requests/api/ci/runner/jobs_artifacts_spec.rb4
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb28
-rw-r--r--spec/requests/api/ci/runner/jobs_trace_spec.rb2
-rw-r--r--spec/requests/api/ci/runner/runners_post_spec.rb64
-rw-r--r--spec/requests/api/ci/runners_spec.rb28
-rw-r--r--spec/requests/api/ci/triggers_spec.rb (renamed from spec/requests/api/triggers_spec.rb)2
-rw-r--r--spec/requests/api/ci/variables_spec.rb (renamed from spec/requests/api/variables_spec.rb)2
-rw-r--r--spec/requests/api/debian_group_packages_spec.rb31
-rw-r--r--spec/requests/api/debian_project_packages_spec.rb31
-rw-r--r--spec/requests/api/environments_spec.rb2
-rw-r--r--spec/requests/api/error_tracking_collector_spec.rb32
-rw-r--r--spec/requests/api/error_tracking_spec.rb16
-rw-r--r--spec/requests/api/feature_flags_spec.rb12
-rw-r--r--spec/requests/api/generic_packages_spec.rb2
-rw-r--r--spec/requests/api/go_proxy_spec.rb2
-rw-r--r--spec/requests/api/graphql/ci/jobs_spec.rb33
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb38
-rw-r--r--spec/requests/api/graphql/current_user_query_spec.rb11
-rw-r--r--spec/requests/api/graphql/group_query_spec.rb25
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_cancel_spec.rb45
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_unschedule_spec.rb48
-rw-r--r--spec/requests/api/graphql/mutations/groups/update_spec.rb66
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/issues/update_spec.rb83
-rw-r--r--spec/requests/api/graphql/mutations/packages/destroy_file_spec.rb93
-rw-r--r--spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb2
-rw-r--r--spec/requests/api/graphql/packages/nuget_spec.rb33
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert/issue_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb6
-rw-r--r--spec/requests/api/graphql/project/issues_spec.rb13
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb40
-rw-r--r--spec/requests/api/graphql/project/repository_spec.rb22
-rw-r--r--spec/requests/api/graphql_spec.rb2
-rw-r--r--spec/requests/api/group_debian_distributions_spec.rb44
-rw-r--r--spec/requests/api/groups_spec.rb58
-rw-r--r--spec/requests/api/invitations_spec.rb14
-rw-r--r--spec/requests/api/maven_packages_spec.rb94
-rw-r--r--spec/requests/api/members_spec.rb47
-rw-r--r--spec/requests/api/namespaces_spec.rb13
-rw-r--r--spec/requests/api/npm_project_packages_spec.rb43
-rw-r--r--spec/requests/api/project_attributes.yml4
-rw-r--r--spec/requests/api/project_milestones_spec.rb2
-rw-r--r--spec/requests/api/projects_spec.rb309
-rw-r--r--spec/requests/api/pypi_packages_spec.rb20
-rw-r--r--spec/requests/api/releases_spec.rb2
-rw-r--r--spec/requests/api/repositories_spec.rb12
-rw-r--r--spec/requests/api/rubygem_packages_spec.rb2
-rw-r--r--spec/requests/api/statistics_spec.rb2
-rw-r--r--spec/requests/api/terraform/modules/v1/packages_spec.rb2
-rw-r--r--spec/requests/api/user_counts_spec.rb38
-rw-r--r--spec/requests/api/v3/github_spec.rb11
-rw-r--r--spec/requests/customers_dot/proxy_controller_spec.rb37
-rw-r--r--spec/requests/git_http_spec.rb4
-rw-r--r--spec/requests/jwt_controller_spec.rb32
-rw-r--r--spec/requests/projects/cycle_analytics_events_spec.rb18
-rw-r--r--spec/requests/projects/merge_requests/diffs_spec.rb73
-rw-r--r--spec/requests/projects/merge_requests_discussions_spec.rb72
-rw-r--r--spec/requests/projects/merge_requests_spec.rb159
-rw-r--r--spec/requests/robots_txt_spec.rb1
-rw-r--r--spec/rubocop/cop/gitlab/bulk_insert_spec.rb12
-rw-r--r--spec/rubocop/cop/gitlab/json_spec.rb15
-rw-r--r--spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb6
-rw-r--r--spec/rubocop/cop/graphql/descriptions_spec.rb118
-rw-r--r--spec/rubocop/cop/graphql/id_type_spec.rb10
-rw-r--r--spec/rubocop/cop/graphql/json_type_spec.rb4
-rw-r--r--spec/rubocop/cop/graphql/old_types_spec.rb84
-rw-r--r--spec/rubocop/cop/ignored_columns_spec.rb78
-rw-r--r--spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb1
-rw-r--r--spec/rubocop/cop/qa/selector_usage_spec.rb49
-rw-r--r--spec/serializers/build_details_entity_spec.rb1
-rw-r--r--spec/serializers/ci/pipeline_entity_spec.rb2
-rw-r--r--spec/serializers/diff_file_entity_spec.rb4
-rw-r--r--spec/serializers/diff_file_metadata_entity_spec.rb10
-rw-r--r--spec/serializers/diffs_entity_spec.rb22
-rw-r--r--spec/serializers/diffs_metadata_entity_spec.rb58
-rw-r--r--spec/serializers/integrations/project_entity_spec.rb26
-rw-r--r--spec/serializers/integrations/project_serializer_spec.rb9
-rw-r--r--spec/serializers/jira_connect/app_data_serializer_spec.rb34
-rw-r--r--spec/serializers/jira_connect/group_entity_spec.rb18
-rw-r--r--spec/serializers/jira_connect/subscription_entity_spec.rb22
-rw-r--r--spec/serializers/merge_request_poll_cached_widget_entity_spec.rb10
-rw-r--r--spec/serializers/merge_request_poll_widget_entity_spec.rb48
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb43
-rw-r--r--spec/serializers/paginated_diff_entity_spec.rb18
-rw-r--r--spec/services/admin/propagate_service_template_spec.rb60
-rw-r--r--spec/services/auth/container_registry_authentication_service_spec.rb17
-rw-r--r--spec/services/auth/dependency_proxy_authentication_service_spec.rb14
-rw-r--r--spec/services/authorized_project_update/project_recalculate_per_user_service_spec.rb99
-rw-r--r--spec/services/boards/issues/list_service_spec.rb9
-rw-r--r--spec/services/bulk_create_integration_service_spec.rb16
-rw-r--r--spec/services/bulk_update_integration_service_spec.rb16
-rw-r--r--spec/services/ci/after_requeue_job_service_spec.rb22
-rw-r--r--spec/services/ci/append_build_trace_service_spec.rb20
-rw-r--r--spec/services/ci/build_cancel_service_spec.rb52
-rw-r--r--spec/services/ci/build_unschedule_service_spec.rb52
-rw-r--r--spec/services/ci/create_downstream_pipeline_service_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service/cache_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb8
-rw-r--r--spec/services/ci/create_pipeline_service/custom_config_content_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/dry_run_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/environment_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/include_spec.rb95
-rw-r--r--spec/services/ci/create_pipeline_service/merge_requests_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/needs_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/parallel_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/parameter_content_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/rules_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb307
-rw-r--r--spec/services/ci/daily_build_group_report_result_service_spec.rb34
-rw-r--r--spec/services/ci/destroy_pipeline_service_spec.rb12
-rw-r--r--spec/services/ci/drop_pipeline_service_spec.rb2
-rw-r--r--spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb42
-rw-r--r--spec/services/ci/extract_sections_from_build_trace_service_spec.rb57
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service.rb2
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb2
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_same_and_different_stage_needs.yml54
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_same_stage_needs.yml70
-rw-r--r--spec/services/ci/pipeline_trigger_service_spec.rb6
-rw-r--r--spec/services/ci/pipelines/add_job_service_spec.rb35
-rw-r--r--spec/services/ci/register_job_service_spec.rb62
-rw-r--r--spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb20
-rw-r--r--spec/services/ci/retry_build_service_spec.rb2
-rw-r--r--spec/services/dependency_proxy/auth_token_service_spec.rb13
-rw-r--r--spec/services/dependency_proxy/download_blob_service_spec.rb17
-rw-r--r--spec/services/dependency_proxy/find_or_create_blob_service_spec.rb2
-rw-r--r--spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb3
-rw-r--r--spec/services/draft_notes/publish_service_spec.rb28
-rw-r--r--spec/services/environments/stop_service_spec.rb (renamed from spec/services/ci/stop_environments_service_spec.rb)74
-rw-r--r--spec/services/error_tracking/issue_details_service_spec.rb17
-rw-r--r--spec/services/error_tracking/issue_latest_event_service_spec.rb22
-rw-r--r--spec/services/error_tracking/issue_update_service_spec.rb15
-rw-r--r--spec/services/error_tracking/list_issues_service_spec.rb14
-rw-r--r--spec/services/git/branch_hooks_service_spec.rb28
-rw-r--r--spec/services/git/branch_push_service_spec.rb28
-rw-r--r--spec/services/git/process_ref_changes_service_spec.rb6
-rw-r--r--spec/services/ide/terminal_config_service_spec.rb2
-rw-r--r--spec/services/issues/close_service_spec.rb2
-rw-r--r--spec/services/issues/update_service_spec.rb2
-rw-r--r--spec/services/jira/requests/projects/list_service_spec.rb15
-rw-r--r--spec/services/members/create_service_spec.rb74
-rw-r--r--spec/services/members/import_project_team_service_spec.rb91
-rw-r--r--spec/services/merge_requests/add_spent_time_service_spec.rb5
-rw-r--r--spec/services/merge_requests/close_service_spec.rb2
-rw-r--r--spec/services/merge_requests/create_pipeline_service_spec.rb58
-rw-r--r--spec/services/merge_requests/merge_to_ref_service_spec.rb31
-rw-r--r--spec/services/merge_requests/post_merge_service_spec.rb2
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb2
-rw-r--r--spec/services/namespaces/in_product_marketing_emails_service_spec.rb35
-rw-r--r--spec/services/notes/create_service_spec.rb8
-rw-r--r--spec/services/notification_service_spec.rb6
-rw-r--r--spec/services/packages/composer/create_package_service_spec.rb35
-rw-r--r--spec/services/packages/create_dependency_service_spec.rb4
-rw-r--r--spec/services/packages/debian/generate_distribution_key_service_spec.rb32
-rw-r--r--spec/services/packages/debian/generate_distribution_service_spec.rb11
-rw-r--r--spec/services/packages/debian/sign_distribution_service_spec.rb60
-rw-r--r--spec/services/packages/nuget/update_package_from_metadata_service_spec.rb341
-rw-r--r--spec/services/packages/update_package_file_service_spec.rb119
-rw-r--r--spec/services/packages/update_tags_service_spec.rb2
-rw-r--r--spec/services/post_receive_service_spec.rb6
-rw-r--r--spec/services/projects/create_service_spec.rb76
-rw-r--r--spec/services/projects/hashed_storage/migrate_repository_service_spec.rb4
-rw-r--r--spec/services/projects/hashed_storage/rollback_repository_service_spec.rb4
-rw-r--r--spec/services/projects/lfs_pointers/lfs_download_service_spec.rb15
-rw-r--r--spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb20
-rw-r--r--spec/services/projects/operations/update_service_spec.rb25
-rw-r--r--spec/services/projects/update_pages_service_spec.rb111
-rw-r--r--spec/services/projects/update_remote_mirror_service_spec.rb23
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb55
-rw-r--r--spec/services/releases/create_service_spec.rb15
-rw-r--r--spec/services/releases/destroy_service_spec.rb15
-rw-r--r--spec/services/releases/update_service_spec.rb15
-rw-r--r--spec/services/resource_events/change_labels_service_spec.rb2
-rw-r--r--spec/services/security/merge_reports_service_spec.rb260
-rw-r--r--spec/services/service_ping/permit_data_categories_service_spec.rb27
-rw-r--r--spec/services/service_ping/service_ping_settings_spec.rb46
-rw-r--r--spec/services/service_ping/submit_service_ping_service_spec.rb8
-rw-r--r--spec/services/service_response_spec.rb2
-rw-r--r--spec/services/spam/akismet_mark_as_spam_service_spec.rb (renamed from spec/services/spam/mark_as_spam_service_spec.rb)2
-rw-r--r--spec/services/spam/spam_action_service_spec.rb3
-rw-r--r--spec/services/system_note_service_spec.rb2
-rw-r--r--spec/services/system_notes/alert_management_service_spec.rb2
-rw-r--r--spec/services/users/activity_service_spec.rb9
-rw-r--r--spec/services/users/ban_service_spec.rb65
-rw-r--r--spec/services/users/banned_user_base_service_spec.rb14
-rw-r--r--spec/services/users/set_status_service_spec.rb40
-rw-r--r--spec/services/users/unban_service_spec.rb75
-rw-r--r--spec/spec_helper.rb4
-rw-r--r--spec/support/before_all_adapter.rb27
-rw-r--r--spec/support/database/ci_tables.rb22
-rw-r--r--spec/support/database/prevent_cross_database_modification.rb109
-rw-r--r--spec/support/database/prevent_cross_joins.rb77
-rw-r--r--spec/support/database_cleaner.rb6
-rw-r--r--spec/support/database_load_balancing.rb19
-rw-r--r--spec/support/db_cleaner.rb8
-rw-r--r--spec/support/enable_multiple_database_metrics_by_default.rb8
-rw-r--r--spec/support/helpers/board_helpers.rb17
-rw-r--r--spec/support/helpers/dependency_proxy_helpers.rb3
-rw-r--r--spec/support/helpers/features/invite_members_modal_helper.rb12
-rw-r--r--spec/support/helpers/features/top_nav_spec_helpers.rb24
-rw-r--r--spec/support/helpers/graphql_helpers.rb2
-rw-r--r--spec/support/helpers/login_helpers.rb10
-rw-r--r--spec/support/helpers/migrations_helpers.rb17
-rw-r--r--spec/support/helpers/test_env.rb15
-rw-r--r--spec/support/helpers/tracking_helpers.rb7
-rw-r--r--spec/support/helpers/x509_helpers.rb11
-rw-r--r--spec/support/import_export/common_util.rb6
-rw-r--r--spec/support/matchers/background_migrations_matchers.rb30
-rw-r--r--spec/support/matchers/schema_matcher.rb11
-rw-r--r--spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/fixtures/analytics_shared_context.rb70
-rw-r--r--spec/support/shared_contexts/graphql/requests/packages_shared_context.rb6
-rw-r--r--spec/support/shared_contexts/issuable/merge_request_shared_context.rb1
-rw-r--r--spec/support/shared_contexts/issuable/project_shared_context.rb1
-rw-r--r--spec/support/shared_contexts/lib/api/helpers/packages/dependency_proxy_helpers_shared_context.rb9
-rw-r--r--spec/support/shared_contexts/lib/gitlab/database/background_migration_job_shared_context.rb21
-rw-r--r--spec/support/shared_contexts/load_balancing_configuration_shared_context.rb19
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb9
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb8
-rw-r--r--spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/controllers/import_controller_status_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/controllers/unique_visits_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/features/dependency_proxy_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/features/discussion_comments_shared_example.rb4
-rw-r--r--spec/support/shared_examples/features/manage_applications_shared_examples.rb67
-rw-r--r--spec/support/shared_examples/features/packages_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb6
-rw-r--r--spec/support/shared_examples/features/rss_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/variable_list_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/finders/security/jobs_finder_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/lib/gitlab/ci/reports/security/locations/locations_shared_examples.rb68
-rw-r--r--spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb43
-rw-r--r--spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/lib/gitlab/migration_helpers_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/lib/menus_shared_examples.rb (renamed from spec/support/shared_examples/helpers/groups_shared_examples.rb)28
-rw-r--r--spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb164
-rw-r--r--spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/models/concerns/incident_management/escalatable_shared_examples.rb246
-rw-r--r--spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb62
-rw-r--r--spec/support/shared_examples/models/mentionable_shared_examples.rb40
-rw-r--r--spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/models/update_project_statistics_shared_examples.rb110
-rw-r--r--spec/support/shared_examples/namespaces/linear_traversal_examples.rb23
-rw-r--r--spec/support/shared_examples/namespaces/traversal_examples.rb86
-rw-r--r--spec/support/shared_examples/namespaces/traversal_scope_examples.rb68
-rw-r--r--spec/support/shared_examples/policies/project_policy_shared_examples.rb15
-rw-r--r--spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb29
-rw-r--r--spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb178
-rw-r--r--spec/support/shared_examples/services/jira/requests/base_shared_examples.rb85
-rw-r--r--spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb398
-rw-r--r--spec/support_specs/database/prevent_cross_database_modification_spec.rb144
-rw-r--r--spec/support_specs/database/prevent_cross_joins_spec.rb56
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb13
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb37
-rw-r--r--spec/tasks/gitlab/ldap_rake_spec.rb16
-rw-r--r--spec/tasks/gitlab/product_intelligence_rake_spec.rb80
-rw-r--r--spec/tasks/gitlab/smtp_rake_spec.rb112
-rw-r--r--spec/tasks/gitlab/storage_rake_spec.rb4
-rw-r--r--spec/tooling/danger/product_intelligence_spec.rb22
-rw-r--r--spec/tooling/danger/project_helper_spec.rb35
-rw-r--r--spec/tooling/graphql/docs/renderer_spec.rb44
-rw-r--r--spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb10
-rw-r--r--spec/validators/any_field_validator_spec.rb44
-rw-r--r--spec/views/admin/dashboard/index.html.haml_spec.rb8
-rw-r--r--spec/views/admin/sessions/new.html.haml_spec.rb8
-rw-r--r--spec/views/devise/sessions/new.html.haml_spec.rb4
-rw-r--r--spec/views/groups/edit.html.haml_spec.rb2
-rw-r--r--spec/views/groups/runners/_sort_dropdown.html.haml_spec.rb31
-rw-r--r--spec/views/groups/settings/_transfer.html.haml_spec.rb4
-rw-r--r--spec/views/groups/show.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/header/_new_dropdown.haml_spec.rb4
-rw-r--r--spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb154
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb4
-rw-r--r--spec/views/projects/commits/show.html.haml_spec.rb35
-rw-r--r--spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb63
-rw-r--r--spec/views/projects/empty.html.haml_spec.rb11
-rw-r--r--spec/views/projects/merge_requests/show.html.haml_spec.rb30
-rw-r--r--spec/views/projects/tree/show.html.haml_spec.rb1
-rw-r--r--spec/views/registrations/welcome/show.html.haml_spec.rb24
-rw-r--r--spec/views/search/show.html.haml_spec.rb49
-rw-r--r--spec/views/shared/access_tokens/_table.html.haml_spec.rb166
-rw-r--r--spec/views/shared/deploy_tokens/_form.html.haml_spec.rb4
-rw-r--r--spec/workers/analytics/usage_trends/counter_job_worker_spec.rb2
-rw-r--r--spec/workers/authorized_project_update/project_recalculate_worker_spec.rb2
-rw-r--r--spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb6
-rw-r--r--spec/workers/build_finished_worker_spec.rb1
-rw-r--r--spec/workers/ci/build_finished_worker_spec.rb1
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb137
-rw-r--r--spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb141
-rw-r--r--spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb17
-rw-r--r--spec/workers/container_expiration_policy_worker_spec.rb6
-rw-r--r--spec/workers/database/drop_detached_partitions_worker_spec.rb29
-rw-r--r--spec/workers/deployments/hooks_worker_spec.rb5
-rw-r--r--spec/workers/environments/auto_delete_cron_worker_spec.rb105
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb9
-rw-r--r--spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb27
-rw-r--r--spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb9
-rw-r--r--spec/workers/gitlab/import/stuck_import_job_spec.rb36
-rw-r--r--spec/workers/jira_connect/forward_event_worker_spec.rb22
-rw-r--r--spec/workers/jira_connect/retry_request_worker_spec.rb45
-rw-r--r--spec/workers/merge_request_mergeability_check_worker_spec.rb20
-rw-r--r--spec/workers/packages/debian/generate_distribution_worker_spec.rb3
-rw-r--r--spec/workers/pipeline_notification_worker_spec.rb4
-rw-r--r--spec/workers/post_receive_spec.rb56
-rw-r--r--spec/workers/propagate_integration_worker_spec.rb3
-rw-r--r--spec/workers/propagate_service_template_worker_spec.rb31
-rw-r--r--spec/workers/repository_remove_remote_worker_spec.rb20
-rw-r--r--spec/workers/users/create_statistics_worker_spec.rb2
-rw-r--r--spec/workers/web_hook_worker_spec.rb4
1289 files changed, 40504 insertions, 15438 deletions
diff --git a/spec/controllers/admin/clusters/applications_controller_spec.rb b/spec/controllers/admin/clusters/applications_controller_spec.rb
deleted file mode 100644
index d1ca64d6bd2..00000000000
--- a/spec/controllers/admin/clusters/applications_controller_spec.rb
+++ /dev/null
@@ -1,139 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Admin::Clusters::ApplicationsController do
- include AccessMatchersForController
-
- def current_application
- Clusters::Cluster::APPLICATIONS[application]
- end
-
- shared_examples 'a secure endpoint' do
- it { expect { subject }.to be_allowed_for(:admin) }
- it { expect { subject }.to be_denied_for(:user) }
- it { expect { subject }.to be_denied_for(:external) }
- end
-
- let(:cluster) { create(:cluster, :instance, :provided_by_gcp) }
-
- describe 'POST create' do
- subject do
- post :create, params: params
- end
-
- let(:application) { 'ingress' }
- let(:params) { { application: application, id: cluster.id } }
-
- describe 'functionality' do
- let(:admin) { create(:admin) }
-
- before do
- sign_in(admin)
- end
-
- it 'schedule an application installation' do
- expect(ClusterInstallAppWorker).to receive(:perform_async).with(application, anything).once
-
- expect { subject }.to change { current_application.count }
- expect(response).to have_gitlab_http_status(:no_content)
- expect(cluster.application_ingress).to be_scheduled
- end
-
- context 'when cluster do not exists' do
- before do
- cluster.destroy!
- end
-
- it 'return 404' do
- expect { subject }.not_to change { current_application.count }
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when application is unknown' do
- let(:application) { 'unkwnown-app' }
-
- it 'return 404' do
- is_expected.to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when application is already installing' do
- before do
- create(:clusters_applications_ingress, :installing, cluster: cluster)
- end
-
- it 'returns 400' do
- is_expected.to have_gitlab_http_status(:bad_request)
- end
- end
- end
-
- describe 'security' do
- before do
- allow(ClusterInstallAppWorker).to receive(:perform_async)
- end
-
- it_behaves_like 'a secure endpoint'
- end
- end
-
- describe 'PATCH update' do
- subject do
- patch :update, params: params
- end
-
- let!(:application) { create(:clusters_applications_cert_manager, :installed, cluster: cluster) }
- let(:application_name) { application.name }
- let(:params) { { application: application_name, id: cluster.id, email: "new-email@example.com" } }
-
- describe 'functionality' do
- let(:admin) { create(:admin) }
-
- before do
- sign_in(admin)
- end
-
- context "when cluster and app exists" do
- it "schedules an application update" do
- expect(ClusterPatchAppWorker).to receive(:perform_async).with(application.name, anything).once
-
- is_expected.to have_gitlab_http_status(:no_content)
-
- expect(cluster.application_cert_manager).to be_scheduled
- end
- end
-
- context 'when cluster do not exists' do
- before do
- cluster.destroy!
- end
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when application is unknown' do
- let(:application_name) { 'unkwnown-app' }
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when application is already scheduled' do
- before do
- application.make_scheduled!
- end
-
- it { is_expected.to have_gitlab_http_status(:bad_request) }
- end
- end
-
- describe 'security' do
- before do
- allow(ClusterPatchAppWorker).to receive(:perform_async)
- end
-
- it_behaves_like 'a secure endpoint'
- end
- end
-end
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
index 5a68bb2749b..64ae2a95b4e 100644
--- a/spec/controllers/admin/integrations_controller_spec.rb
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -43,15 +43,15 @@ RSpec.describe Admin::IntegrationsController do
stub_jira_integration_test
allow(PropagateIntegrationWorker).to receive(:perform_async)
- put :update, params: { id: integration.class.to_param, service: { url: url } }
+ put :update, params: { id: integration.class.to_param, service: params }
end
context 'valid params' do
- let(:url) { 'https://jira.gitlab-example.com' }
+ let(:params) { { url: 'https://jira.gitlab-example.com', password: 'password' } }
it 'updates the integration' do
expect(response).to have_gitlab_http_status(:found)
- expect(integration.reload.url).to eq(url)
+ expect(integration.reload).to have_attributes(params)
end
it 'calls to PropagateIntegrationWorker' do
@@ -60,12 +60,12 @@ RSpec.describe Admin::IntegrationsController do
end
context 'invalid params' do
- let(:url) { 'invalid' }
+ let(:params) { { url: 'invalid', password: 'password' } }
it 'does not update the integration' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:edit)
- expect(integration.reload.url).not_to eq(url)
+ expect(integration.reload).not_to have_attributes(params)
end
it 'does not call to PropagateIntegrationWorker' do
@@ -97,4 +97,40 @@ RSpec.describe Admin::IntegrationsController do
.and change { Integrations::Jira.inherit_from_id(integration.id).count }.by(-1)
end
end
+
+ describe '#overrides' do
+ let_it_be(:instance_integration) { create(:bugzilla_integration, :instance) }
+ let_it_be(:non_overridden_integration) { create(:bugzilla_integration, inherit_from_id: instance_integration.id) }
+ let_it_be(:overridden_integration) { create(:bugzilla_integration) }
+ let_it_be(:overridden_other_integration) { create(:confluence_integration) }
+
+ subject do
+ get :overrides, params: { id: instance_integration.class.to_param }, format: format
+ end
+
+ context 'when format is JSON' do
+ let(:format) { :json }
+
+ include_context 'JSON response'
+
+ it 'returns projects with overrides', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to contain_exactly(a_hash_including('full_name' => overridden_integration.project.full_name))
+ end
+ end
+
+ context 'when format is HTML' do
+ let(:format) { :html }
+
+ it 'renders template' do
+ subject
+
+ expect(response).to render_template 'shared/integrations/overrides'
+ expect(assigns(:integration)).to eq(instance_integration)
+ end
+ end
+ end
end
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index b5e7af2c012..8e57b4f03a7 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -12,42 +12,11 @@ RSpec.describe Admin::RunnersController do
describe '#index' do
render_views
- before do
- stub_feature_flags(runner_list_view_vue_ui: false)
- end
-
it 'lists all runners' do
get :index
expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'avoids N+1 queries', :request_store do
- get :index
-
- control_count = ActiveRecord::QueryRecorder.new { get :index }.count
-
- create_list(:ci_runner, 5, :tagged_only)
-
- # There is still an N+1 query for `runner.builds.count`
- # We also need to add 1 because it takes 2 queries to preload tags
- # also looking for token nonce requires database queries
- expect { get :index }.not_to exceed_query_limit(control_count + 16)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.body).to have_content('tag1')
- expect(response.body).to have_content('tag2')
- end
-
- it 'paginates runners' do
- stub_const("Admin::RunnersController::NUMBER_OF_RUNNERS_PER_PAGE", 1)
-
- create(:ci_runner)
-
- get :index
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:runners).count).to be(1)
+ expect(response).to render_template(:index)
end
end
diff --git a/spec/controllers/admin/services_controller_spec.rb b/spec/controllers/admin/services_controller_spec.rb
deleted file mode 100644
index 06ff8f0db94..00000000000
--- a/spec/controllers/admin/services_controller_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Admin::ServicesController do
- let(:admin) { create(:admin) }
-
- before do
- sign_in(admin)
- end
-
- describe 'GET #edit' do
- let(:service) do
- create(:jira_integration, :template)
- end
-
- it 'successfully displays the template' do
- get :edit, params: { id: service.id }
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- context 'when integration does not exists' do
- it 'redirects to the admin application integration page' do
- get :edit, params: { id: 'invalid' }
-
- expect(response).to redirect_to(admin_application_settings_services_path)
- end
- end
-
- context 'when instance integration exists' do
- before do
- create(:jira_integration, :instance)
- end
-
- it 'redirects to the admin application integration page' do
- get :edit, params: { id: service.id }
-
- expect(response).to redirect_to(admin_application_settings_services_path)
- end
- end
- end
-
- describe "#update" do
- let(:project) { create(:project) }
- let!(:service_template) do
- Integrations::Redmine.create!(
- project: nil,
- active: false,
- template: true,
- properties: {
- project_url: 'http://abc',
- issues_url: 'http://abc',
- new_issue_url: 'http://abc'
- }
- )
- end
-
- it 'calls the propagation worker when service is active' do
- expect(PropagateServiceTemplateWorker).to receive(:perform_async).with(service_template.id)
-
- put :update, params: { id: service_template.id, service: { active: true } }
-
- expect(response).to have_gitlab_http_status(:found)
- end
-
- it 'does not call the propagation worker when service is not active' do
- expect(PropagateServiceTemplateWorker).not_to receive(:perform_async)
-
- put :update, params: { id: service_template.id, service: { properties: {} } }
-
- expect(response).to have_gitlab_http_status(:found)
- end
- end
-end
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index 6dc5c38cb76..6e172f53257 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -359,13 +359,12 @@ RSpec.describe Admin::UsersController do
end
end
- describe 'PUT ban/:id' do
+ describe 'PUT ban/:id', :aggregate_failures do
context 'when ban_user_feature_flag is enabled' do
it 'bans user' do
put :ban, params: { id: user.username }
- user.reload
- expect(user.banned?).to be_truthy
+ expect(user.reload.banned?).to be_truthy
expect(flash[:notice]).to eq _('Successfully banned')
end
@@ -390,21 +389,19 @@ RSpec.describe Admin::UsersController do
it 'does not ban user, renders 404' do
put :ban, params: { id: user.username }
- user.reload
- expect(user.banned?).to be_falsey
+ expect(user.reload.banned?).to be_falsey
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
- describe 'PUT unban/:id' do
+ describe 'PUT unban/:id', :aggregate_failures do
let(:banned_user) { create(:user, :banned) }
it 'unbans user' do
put :unban, params: { id: banned_user.username }
- banned_user.reload
- expect(banned_user.banned?).to be_falsey
+ expect(banned_user.reload.banned?).to be_falsey
expect(flash[:notice]).to eq _('Successfully unbanned')
end
end
@@ -419,6 +416,7 @@ RSpec.describe Admin::UsersController do
put :unlock, params: { id: user.username }
user.reload
expect(user.access_locked?).to be_falsey
+ expect(flash[:notice]).to eq _('Successfully unlocked')
end
end
diff --git a/spec/controllers/concerns/redis_tracking_spec.rb b/spec/controllers/concerns/redis_tracking_spec.rb
index 4077f4f5cce..178684ae2d0 100644
--- a/spec/controllers/concerns/redis_tracking_spec.rb
+++ b/spec/controllers/concerns/redis_tracking_spec.rb
@@ -3,6 +3,8 @@
require "spec_helper"
RSpec.describe RedisTracking do
+ include TrackingHelpers
+
let(:user) { create(:user) }
controller(ApplicationController) do
@@ -60,7 +62,7 @@ RSpec.describe RedisTracking do
end
it 'tracks the event if DNT is not enabled' do
- request.headers['DNT'] = '0'
+ stub_do_not_track('0')
expect_tracking
@@ -68,7 +70,7 @@ RSpec.describe RedisTracking do
end
it 'does not track the event if DNT is enabled' do
- request.headers['DNT'] = '1'
+ stub_do_not_track('1')
expect_no_tracking
diff --git a/spec/controllers/concerns/spammable_actions/akismet_mark_as_spam_action_spec.rb b/spec/controllers/concerns/spammable_actions/akismet_mark_as_spam_action_spec.rb
new file mode 100644
index 00000000000..7c10dccdcb9
--- /dev/null
+++ b/spec/controllers/concerns/spammable_actions/akismet_mark_as_spam_action_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SpammableActions::AkismetMarkAsSpamAction do
+ include AfterNextHelpers
+
+ controller(ActionController::Base) do
+ include SpammableActions::AkismetMarkAsSpamAction
+
+ private
+
+ def spammable_path
+ '/fake_spammable_path'
+ end
+ end
+
+ let(:spammable_type) { 'SpammableType' }
+ let(:spammable) { double(:spammable, spammable_entity_type: double(:spammable_entity_type, titlecase: spammable_type)) }
+ let(:current_user) { create(:admin) }
+
+ before do
+ allow(Gitlab::Recaptcha).to receive(:load_configurations!) { true }
+ routes.draw { get 'mark_as_spam' => 'anonymous#mark_as_spam' }
+ allow(controller).to receive(:spammable) { spammable }
+ allow(controller).to receive(:current_user) { double(:current_user, admin?: admin) }
+ allow(controller).to receive(:current_user).and_return(current_user)
+ end
+
+ describe '#mark_as_spam' do
+ subject { post :mark_as_spam }
+
+ before do
+ expect_next(Spam::AkismetMarkAsSpamService, target: spammable)
+ .to receive(:execute).and_return(execute_result)
+ end
+
+ context 'when user is admin', :enable_admin_mode do
+ let(:admin) { true }
+
+ context 'when service returns truthy' do
+ let(:execute_result) { true }
+
+ it 'redirects with notice' do
+ expect(subject).to redirect_to('/fake_spammable_path')
+ expect(subject.request.flash[:notice]).to match(/#{spammable_type}.*submitted.*successfully/)
+ end
+ end
+
+ context 'when service returns falsey' do
+ let(:execute_result) { false }
+
+ it 'redirects with notice' do
+ expect(subject).to redirect_to('/fake_spammable_path')
+ expect(subject.request.flash[:alert]).to match(/Error/)
+ end
+ end
+ end
+
+ context 'when user is not admin' do
+ let(:admin) { false }
+ let(:execute_result) { true }
+
+ it 'calls #access_denied!' do
+ expect(controller).to receive(:access_denied!) { false }
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/controllers/concerns/spammable_actions/captcha_check/html_format_actions_support_spec.rb b/spec/controllers/concerns/spammable_actions/captcha_check/html_format_actions_support_spec.rb
new file mode 100644
index 00000000000..53a78326397
--- /dev/null
+++ b/spec/controllers/concerns/spammable_actions/captcha_check/html_format_actions_support_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SpammableActions::CaptchaCheck::HtmlFormatActionsSupport do
+ controller(ActionController::Base) do
+ include SpammableActions::CaptchaCheck::HtmlFormatActionsSupport
+
+ def create
+ with_captcha_check_html_format { render :some_rendered_view }
+ end
+ end
+
+ let(:spammable) { double(:spammable) }
+
+ before do
+ allow(Gitlab::Recaptcha).to receive(:load_configurations!) { true }
+ routes.draw { get 'create' => 'anonymous#create' }
+ allow(controller).to receive(:spammable) { spammable }
+ expect(spammable).to receive(:render_recaptcha?).at_least(:once) { render_recaptcha }
+ end
+
+ describe '#convert_html_spam_params_to_headers' do
+ let(:render_recaptcha) { false }
+ let(:g_recaptcha_response) { 'abc123' }
+ let(:spam_log_id) { 42 }
+
+ let(:params) do
+ {
+ 'g-recaptcha-response' => g_recaptcha_response,
+ spam_log_id: spam_log_id
+ }
+ end
+
+  # NOTE: `:update` has an identical `before_action` behavior to `:create`, but `before_action` is
+  # declarative via the `:only` attribute, so there's little value in re-testing the behavior.
+ subject { post :create, params: params }
+
+ before do
+ allow(controller).to receive(:render).with(:some_rendered_view)
+ end
+
+ it 'converts params to headers' do
+ subject
+
+ expect(controller.request.headers['X-GitLab-Captcha-Response']).to eq(g_recaptcha_response)
+ expect(controller.request.headers['X-GitLab-Spam-Log-Id']).to eq(spam_log_id.to_s)
+ end
+ end
+
+ describe '#with_captcha_check_html_format' do
+ subject { post :create }
+
+ context 'when spammable.render_recaptcha? is true' do
+ let(:render_recaptcha) { true }
+
+ it 'renders :captcha_check' do
+ expect(controller).to receive(:render).with(:captcha_check)
+
+ subject
+ end
+ end
+
+ context 'when spammable.render_recaptcha? is false' do
+ let(:render_recaptcha) { false }
+
+ it 'yields to block' do
+ expect(controller).to receive(:render).with(:some_rendered_view)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/controllers/concerns/spammable_actions/captcha_check/json_format_actions_support_spec.rb b/spec/controllers/concerns/spammable_actions/captcha_check/json_format_actions_support_spec.rb
new file mode 100644
index 00000000000..d7a44351ad8
--- /dev/null
+++ b/spec/controllers/concerns/spammable_actions/captcha_check/json_format_actions_support_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SpammableActions::CaptchaCheck::JsonFormatActionsSupport do
+ controller(ActionController::Base) do
+ include SpammableActions::CaptchaCheck::JsonFormatActionsSupport
+
+ def some_action
+ with_captcha_check_json_format { render :some_rendered_view }
+ end
+ end
+
+ before do
+ allow(Gitlab::Recaptcha).to receive(:load_configurations!) { true }
+ end
+
+ describe '#with_captcha_check_json_format' do
+ subject { post :some_action }
+
+ let(:spammable) { double(:spammable) }
+
+ before do
+ routes.draw { get 'some_action' => 'anonymous#some_action' }
+ allow(controller).to receive(:spammable) { spammable }
+ expect(spammable).to receive(:render_recaptcha?).at_least(:once) { render_recaptcha }
+ end
+
+ context 'when spammable.render_recaptcha? is true' do
+ let(:render_recaptcha) { true }
+ let(:spam_log) { double(:spam_log, id: 1) }
+ let(:spammable) { double(:spammable, spam?: true, render_recaptcha?: render_recaptcha, spam_log: spam_log) }
+ let(:recaptcha_site_key) { 'abc123' }
+ let(:spam_action_response_fields) do
+ {
+ spam: true,
+ needs_captcha_response: render_recaptcha,
+ spam_log_id: 1,
+ captcha_site_key: recaptcha_site_key
+ }
+ end
+
+ it 'renders json containing spam_action_response_fields' do
+ expect(controller).to receive(:render).with(json: spam_action_response_fields, status: :conflict)
+ allow(Gitlab::CurrentSettings).to receive(:recaptcha_site_key) { recaptcha_site_key }
+ subject
+ end
+ end
+
+ context 'when spammable.render_recaptcha? is false' do
+ let(:render_recaptcha) { false }
+
+ it 'yields to block' do
+ expect(controller).to receive(:render).with(:some_rendered_view)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/controllers/concerns/spammable_actions_spec.rb b/spec/controllers/concerns/spammable_actions_spec.rb
deleted file mode 100644
index 7bd5a76e60c..00000000000
--- a/spec/controllers/concerns/spammable_actions_spec.rb
+++ /dev/null
@@ -1,112 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe SpammableActions do
- controller(ActionController::Base) do
- include SpammableActions
-
- # #update is used here to test #recaptcha_check_with_fallback, but it could be invoked
- # from #create or any other action which mutates a spammable via a controller.
- def update
- should_redirect = params[:should_redirect] == 'true'
-
- recaptcha_check_with_fallback(should_redirect) { render json: :ok }
- end
-
- private
-
- def spammable_path
- '/fake_spammable_path'
- end
- end
-
- before do
- allow(Gitlab::Recaptcha).to receive(:load_configurations!) { true }
- end
-
- describe '#recaptcha_check_with_fallback' do
- shared_examples 'yields to block' do
- it do
- subject
-
- expect(json_response).to eq({ json: 'ok' })
- end
- end
-
- let(:format) { :html }
-
- subject { post :update, format: format, params: params }
-
- let(:spammable) { double(:spammable) }
- let(:should_redirect) { nil }
- let(:params) do
- {
- should_redirect: should_redirect
- }
- end
-
- before do
- routes.draw { get 'update' => 'anonymous#update' }
- allow(controller).to receive(:spammable) { spammable }
- end
-
- context 'when should_redirect is true and spammable is valid' do
- let(:should_redirect) { true }
-
- before do
- allow(spammable).to receive(:valid?) { true }
- end
-
- it 'redirects to spammable_path' do
- expect(subject).to redirect_to('/fake_spammable_path')
- end
- end
-
- context 'when should_redirect is false or spammable is not valid' do
- before do
- allow(spammable).to receive(:valid?) { false }
- end
-
- context 'when spammable.render_recaptcha? is true' do
- let(:spam_log) { instance_double(SpamLog, id: 123) }
- let(:captcha_site_key) { 'abc123' }
-
- before do
- expect(spammable).to receive(:render_recaptcha?).at_least(:once) { true }
- end
-
- context 'when format is :html' do
- it 'renders :verify' do
- expect(controller).to receive(:render).with(:verify)
-
- subject
- end
- end
-
- context 'when format is :json' do
- let(:format) { :json }
-
- before do
- expect(spammable).to receive(:spam?) { false }
- expect(spammable).to receive(:spam_log) { spam_log }
- expect(Gitlab::CurrentSettings).to receive(:recaptcha_site_key) { captcha_site_key }
- end
-
- it 'renders json with spam_action_response_fields' do
- subject
-
- expected_json_response = HashWithIndifferentAccess.new(
- {
- spam: false,
- needs_captcha_response: true,
- spam_log_id: spam_log.id,
- captcha_site_key: captcha_site_key
- })
- expect(json_response).to eq(expected_json_response)
- end
- end
- end
- end
- end
-end
diff --git a/spec/controllers/dashboard/projects_controller_spec.rb b/spec/controllers/dashboard/projects_controller_spec.rb
index 0d9bd146778..9b13025cbe3 100644
--- a/spec/controllers/dashboard/projects_controller_spec.rb
+++ b/spec/controllers/dashboard/projects_controller_spec.rb
@@ -179,7 +179,7 @@ RSpec.describe Dashboard::ProjectsController, :aggregate_failures do
expect(response).to render_template('dashboard/projects/index')
expect(response.body).to include(
"pushed to project",
- "uploaded design #{design.to_reference}",
+ "added design #{design.to_reference}",
"created wiki page #{wiki_page.title}",
"joined project #{project.full_name}",
"closed issue #{issue.to_reference}"
diff --git a/spec/controllers/groups/clusters/applications_controller_spec.rb b/spec/controllers/groups/clusters/applications_controller_spec.rb
deleted file mode 100644
index 5629e86c928..00000000000
--- a/spec/controllers/groups/clusters/applications_controller_spec.rb
+++ /dev/null
@@ -1,148 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Groups::Clusters::ApplicationsController do
- include AccessMatchersForController
-
- def current_application
- Clusters::Cluster::APPLICATIONS[application]
- end
-
- shared_examples 'a secure endpoint' do
- it('is allowed for admin when admin mode is enabled', :enable_admin_mode) { expect { subject }.to be_allowed_for(:admin) }
- it('is denied for admin when admin mode is disabled') { expect { subject }.to be_denied_for(:admin) }
- it { expect { subject }.to be_allowed_for(:owner).of(group) }
- it { expect { subject }.to be_allowed_for(:maintainer).of(group) }
- it { expect { subject }.to be_denied_for(:developer).of(group) }
- it { expect { subject }.to be_denied_for(:reporter).of(group) }
- it { expect { subject }.to be_denied_for(:guest).of(group) }
- it { expect { subject }.to be_denied_for(:user) }
- it { expect { subject }.to be_denied_for(:external) }
- end
-
- let(:cluster) { create(:cluster, :group, :provided_by_gcp) }
- let(:group) { cluster.group }
-
- describe 'POST create' do
- subject do
- post :create, params: params.merge(group_id: group)
- end
-
- let(:application) { 'ingress' }
- let(:params) { { application: application, id: cluster.id } }
-
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- group.add_maintainer(user)
- sign_in(user)
- end
-
- it 'schedule an application installation' do
- expect(ClusterInstallAppWorker).to receive(:perform_async).with(application, anything).once
-
- expect { subject }.to change { current_application.count }
- expect(response).to have_gitlab_http_status(:no_content)
- expect(cluster.application_ingress).to be_scheduled
- end
-
- context 'when cluster do not exists' do
- before do
- cluster.destroy!
- end
-
- it 'return 404' do
- expect { subject }.not_to change { current_application.count }
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when application is unknown' do
- let(:application) { 'unkwnown-app' }
-
- it 'return 404' do
- is_expected.to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when application is already installing' do
- before do
- create(:clusters_applications_ingress, :installing, cluster: cluster)
- end
-
- it 'returns 400' do
- is_expected.to have_gitlab_http_status(:bad_request)
- end
- end
- end
-
- describe 'security' do
- before do
- allow(ClusterInstallAppWorker).to receive(:perform_async)
- end
-
- it_behaves_like 'a secure endpoint'
- end
- end
-
- describe 'PATCH update' do
- subject do
- patch :update, params: params.merge(group_id: group)
- end
-
- let!(:application) { create(:clusters_applications_cert_manager, :installed, cluster: cluster) }
- let(:application_name) { application.name }
- let(:params) { { application: application_name, id: cluster.id, email: "new-email@example.com" } }
-
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- group.add_maintainer(user)
- sign_in(user)
- end
-
- context "when cluster and app exists" do
- it "schedules an application update" do
- expect(ClusterPatchAppWorker).to receive(:perform_async).with(application.name, anything).once
-
- is_expected.to have_gitlab_http_status(:no_content)
-
- expect(cluster.application_cert_manager).to be_scheduled
- end
- end
-
- context 'when cluster do not exists' do
- before do
- cluster.destroy!
- end
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when application is unknown' do
- let(:application_name) { 'unkwnown-app' }
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when application is already scheduled' do
- before do
- application.make_scheduled!
- end
-
- it { is_expected.to have_gitlab_http_status(:bad_request) }
- end
- end
-
- describe 'security' do
- before do
- allow(ClusterPatchAppWorker).to receive(:perform_async)
- end
-
- it_behaves_like 'a secure endpoint'
- end
- end
-end
diff --git a/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb b/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb
index f67b2022219..50e19d5b482 100644
--- a/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb
@@ -30,16 +30,31 @@ RSpec.describe Groups::DependencyProxyAuthController do
end
context 'with valid JWT' do
- let_it_be(:user) { create(:user) }
+ context 'user' do
+ let_it_be(:user) { create(:user) }
- let(:jwt) { build_jwt(user) }
- let(:token_header) { "Bearer #{jwt.encoded}" }
+ let(:jwt) { build_jwt(user) }
+ let(:token_header) { "Bearer #{jwt.encoded}" }
- before do
- request.headers['HTTP_AUTHORIZATION'] = token_header
+ before do
+ request.headers['HTTP_AUTHORIZATION'] = token_header
+ end
+
+ it { is_expected.to have_gitlab_http_status(:success) }
end
- it { is_expected.to have_gitlab_http_status(:success) }
+ context 'deploy token' do
+ let_it_be(:user) { create(:deploy_token) }
+
+ let(:jwt) { build_jwt(user) }
+ let(:token_header) { "Bearer #{jwt.encoded}" }
+
+ before do
+ request.headers['HTTP_AUTHORIZATION'] = token_header
+ end
+
+ it { is_expected.to have_gitlab_http_status(:success) }
+ end
end
context 'with invalid JWT' do
@@ -51,7 +66,7 @@ RSpec.describe Groups::DependencyProxyAuthController do
request.headers['HTTP_AUTHORIZATION'] = token_header
end
- it { is_expected.to have_gitlab_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
end
context 'token with no user id' do
@@ -61,7 +76,7 @@ RSpec.describe Groups::DependencyProxyAuthController do
request.headers['HTTP_AUTHORIZATION'] = token_header
end
- it { is_expected.to have_gitlab_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
end
context 'expired token' do
@@ -76,6 +91,32 @@ RSpec.describe Groups::DependencyProxyAuthController do
it { is_expected.to have_gitlab_http_status(:unauthorized) }
end
+
+ context 'expired deploy token' do
+ let_it_be(:user) { create(:deploy_token, :expired) }
+
+ let(:jwt) { build_jwt(user) }
+ let(:token_header) { "Bearer #{jwt.encoded}" }
+
+ before do
+ request.headers['HTTP_AUTHORIZATION'] = token_header
+ end
+
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
+ end
+
+ context 'revoked deploy token' do
+ let_it_be(:user) { create(:deploy_token, :revoked) }
+
+ let(:jwt) { build_jwt(user) }
+ let(:token_header) { "Bearer #{jwt.encoded}" }
+
+ before do
+ request.headers['HTTP_AUTHORIZATION'] = token_header
+ end
+
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
+ end
end
end
end
diff --git a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
index 9f30a850ca2..7415c2860c8 100644
--- a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
@@ -7,11 +7,12 @@ RSpec.describe Groups::DependencyProxyForContainersController do
include DependencyProxyHelpers
let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:group) { create(:group, :private) }
- let(:group) { create(:group) }
let(:token_response) { { status: :success, token: 'abcd1234' } }
let(:jwt) { build_jwt(user) }
let(:token_header) { "Bearer #{jwt.encoded}" }
+ let(:snowplow_gitlab_standard_context) { { namespace: group, user: user } }
shared_examples 'without a token' do
before do
@@ -19,6 +20,8 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
context 'feature flag disabled' do
+ let_it_be(:group) { create(:group) }
+
before do
stub_feature_flags(dependency_proxy_for_private_groups: false)
end
@@ -34,13 +37,12 @@ RSpec.describe Groups::DependencyProxyForContainersController do
stub_feature_flags(dependency_proxy_for_private_groups: false)
end
- it 'redirects', :aggregate_failures do
+ it 'returns not found' do
group.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
subject
- expect(response).to have_gitlab_http_status(:redirect)
- expect(response.location).to end_with(new_user_session_path)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -52,21 +54,52 @@ RSpec.describe Groups::DependencyProxyForContainersController do
request.headers['HTTP_AUTHORIZATION'] = token_header
end
- it { is_expected.to have_gitlab_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
end
context 'with valid user that does not have access' do
- let(:group) { create(:group, :private) }
-
before do
- user = double('bad_user', id: 999)
- token_header = "Bearer #{build_jwt(user).encoded}"
request.headers['HTTP_AUTHORIZATION'] = token_header
end
it { is_expected.to have_gitlab_http_status(:not_found) }
end
+  context 'with deploy token from a different group' do
+ let_it_be(:user) { create(:deploy_token, :group, :dependency_proxy_scopes) }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
+ context 'with revoked deploy token' do
+ let_it_be(:user) { create(:deploy_token, :revoked, :group, :dependency_proxy_scopes) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
+
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
+ end
+
+ context 'with expired deploy token' do
+ let_it_be(:user) { create(:deploy_token, :expired, :group, :dependency_proxy_scopes) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
+
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
+ end
+
+ context 'with deploy token with insufficient scopes' do
+ let_it_be(:user) { create(:deploy_token, :group) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
+ context 'when a group is not found' do
+ before do
+ expect(Group).to receive(:find_by_full_path).and_return(nil)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
context 'when user is not found' do
before do
allow(User).to receive(:find).and_return(nil)
@@ -104,7 +137,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
describe 'GET #manifest' do
let_it_be(:manifest) { create(:dependency_proxy_manifest) }
- let(:pull_response) { { status: :success, manifest: manifest } }
+ let(:pull_response) { { status: :success, manifest: manifest, from_cache: false } }
before do
allow_next_instance_of(DependencyProxy::FindOrCreateManifestService) do |instance|
@@ -132,6 +165,10 @@ RSpec.describe Groups::DependencyProxyForContainersController do
}
end
+ before do
+ group.add_guest(user)
+ end
+
it 'proxies status from the remote token request', :aggregate_failures do
subject
@@ -149,6 +186,10 @@ RSpec.describe Groups::DependencyProxyForContainersController do
}
end
+ before do
+ group.add_guest(user)
+ end
+
it 'proxies status from the remote manifest request', :aggregate_failures do
subject
@@ -157,21 +198,39 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
end
- it 'sends a file' do
- expect(controller).to receive(:send_file).with(manifest.file.path, type: manifest.content_type)
+ context 'a valid user' do
+ before do
+ group.add_guest(user)
+ end
- subject
+ it_behaves_like 'a successful manifest pull'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_manifest'
+
+ context 'with a cache entry' do
+ let(:pull_response) { { status: :success, manifest: manifest, from_cache: true } }
+
+ it_behaves_like 'returning response status', :success
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_manifest_from_cache'
+ end
end
- it 'returns Content-Disposition: attachment' do
- subject
+ context 'a valid deploy token' do
+ let_it_be(:user) { create(:deploy_token, :dependency_proxy_scopes, :group) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
+
+ it_behaves_like 'a successful manifest pull'
+
+ context 'pulling from a subgroup' do
+ let_it_be_with_reload(:parent_group) { create(:group) }
+ let_it_be_with_reload(:group) { create(:group, parent: parent_group) }
+
+ before do
+ parent_group.create_dependency_proxy_setting!(enabled: true)
+ group_deploy_token.update_column(:group_id, parent_group.id)
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers['Docker-Content-Digest']).to eq(manifest.digest)
- expect(response.headers['Content-Length']).to eq(manifest.size)
- expect(response.headers['Docker-Distribution-Api-Version']).to eq(DependencyProxy::DISTRIBUTION_API_VERSION)
- expect(response.headers['Etag']).to eq("\"#{manifest.digest}\"")
- expect(response.headers['Content-Disposition']).to match(/^attachment/)
+ it_behaves_like 'a successful manifest pull'
+ end
end
end
@@ -186,7 +245,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
let_it_be(:blob) { create(:dependency_proxy_blob) }
let(:blob_sha) { blob.file_name.sub('.gz', '') }
- let(:blob_response) { { status: :success, blob: blob } }
+ let(:blob_response) { { status: :success, blob: blob, from_cache: false } }
before do
allow_next_instance_of(DependencyProxy::FindOrCreateBlobService) do |instance|
@@ -214,6 +273,10 @@ RSpec.describe Groups::DependencyProxyForContainersController do
}
end
+ before do
+ group.add_guest(user)
+ end
+
it 'proxies status from the remote blob request', :aggregate_failures do
subject
@@ -222,17 +285,39 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
end
- it 'sends a file' do
- expect(controller).to receive(:send_file).with(blob.file.path, {})
+ context 'a valid user' do
+ before do
+ group.add_guest(user)
+ end
- subject
+ it_behaves_like 'a successful blob pull'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_blob'
+
+ context 'with a cache entry' do
+ let(:blob_response) { { status: :success, blob: blob, from_cache: true } }
+
+ it_behaves_like 'returning response status', :success
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_blob_from_cache'
+ end
end
- it 'returns Content-Disposition: attachment', :aggregate_failures do
- subject
+ context 'a valid deploy token' do
+ let_it_be(:user) { create(:deploy_token, :group, :dependency_proxy_scopes) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
+
+ it_behaves_like 'a successful blob pull'
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers['Content-Disposition']).to match(/^attachment/)
+ context 'pulling from a subgroup' do
+ let_it_be_with_reload(:parent_group) { create(:group) }
+ let_it_be_with_reload(:group) { create(:group, parent: parent_group) }
+
+ before do
+ parent_group.create_dependency_proxy_setting!(enabled: true)
+ group_deploy_token.update_column(:group_id, parent_group.id)
+ end
+
+ it_behaves_like 'a successful blob pull'
+ end
end
end
diff --git a/spec/controllers/groups/runners_controller_spec.rb b/spec/controllers/groups/runners_controller_spec.rb
index 2f1c6c813cf..1808969cd60 100644
--- a/spec/controllers/groups/runners_controller_spec.rb
+++ b/spec/controllers/groups/runners_controller_spec.rb
@@ -15,6 +15,33 @@ RSpec.describe Groups::RunnersController do
sign_in(user)
end
+ describe '#index' do
+ context 'when user is owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'renders show with 200 status code' do
+ get :index, params: { group_id: group }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ end
+ end
+
+ context 'when user is not owner' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'renders a 404' do
+ get :index, params: { group_id: group }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
describe '#show' do
context 'when user is owner' do
before do
diff --git a/spec/controllers/groups/settings/integrations_controller_spec.rb b/spec/controllers/groups/settings/integrations_controller_spec.rb
index ef8f9f69710..931e726850a 100644
--- a/spec/controllers/groups/settings/integrations_controller_spec.rb
+++ b/spec/controllers/groups/settings/integrations_controller_spec.rb
@@ -69,25 +69,25 @@ RSpec.describe Groups::Settings::IntegrationsController do
group.add_owner(user)
stub_jira_integration_test
- put :update, params: { group_id: group, id: integration.class.to_param, service: { url: url } }
+ put :update, params: { group_id: group, id: integration.class.to_param, service: params }
end
context 'valid params' do
- let(:url) { 'https://jira.gitlab-example.com' }
+ let(:params) { { url: 'https://jira.gitlab-example.com', password: 'password' } }
it 'updates the integration' do
expect(response).to have_gitlab_http_status(:found)
- expect(integration.reload.url).to eq(url)
+ expect(integration.reload).to have_attributes(params)
end
end
context 'invalid params' do
- let(:url) { 'invalid' }
+ let(:params) { { url: 'invalid', password: 'password' } }
it 'does not update the integration' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:edit)
- expect(integration.reload.url).not_to eq(url)
+ expect(integration.reload).not_to have_attributes(params)
end
end
end
diff --git a/spec/controllers/import/available_namespaces_controller_spec.rb b/spec/controllers/import/available_namespaces_controller_spec.rb
index ebccc862a13..0f98d649338 100644
--- a/spec/controllers/import/available_namespaces_controller_spec.rb
+++ b/spec/controllers/import/available_namespaces_controller_spec.rb
@@ -4,26 +4,94 @@ require 'spec_helper'
RSpec.describe Import::AvailableNamespacesController do
let_it_be(:user) { create(:user) }
- let_it_be(:manageable_groups) { [create(:group), create(:group)] }
before do
sign_in(user)
- manageable_groups.each { |group| group.add_maintainer(user) }
end
describe "GET index" do
- it "returns list of available namespaces" do
- unrelated_group = create(:group)
+ context "when having group with role never allowed to create projects" do
+ using RSpec::Parameterized::TableSyntax
- get :index
+ where(
+ role: [:guest, :reporter],
+ default_project_creation_access: [::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS],
+ group_project_creation_level: [nil, ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS])
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_kind_of(Array)
+ with_them do
+ before do
+ stub_application_setting(default_project_creation: default_project_creation_access)
+ end
- response_ids = json_response.map { |n| n["id"] }
+ it "does not include group with access level #{params[:role]} in list" do
+ group = create(:group, project_creation_level: group_project_creation_level)
+ group.add_user(user, role)
+ get :index
- expect(response_ids).not_to include(unrelated_group.id)
- expect(response_ids).to contain_exactly(*manageable_groups.map(&:id))
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).not_to include({
+ 'id' => group.id,
+ 'full_path' => group.full_path
+ })
+ end
+ end
+ end
+
+ context "when having group with role always allowed to create projects" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(
+ role: [:maintainer, :owner],
+ default_project_creation_access: [::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS],
+ group_project_creation_level: [nil, ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS])
+
+ with_them do
+ before do
+ stub_application_setting(default_project_creation: default_project_creation_access)
+ end
+
+      it "includes group with access level #{params[:role]} in list" do
+ group = create(:group, project_creation_level: group_project_creation_level)
+ group.add_user(user, role)
+ get :index
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to include({
+ 'id' => group.id,
+ 'full_path' => group.full_path
+ })
+ end
+ end
+ end
+
+ context "when having developer role" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:default_project_creation_access, :project_creation_level, :is_visible) do
+ ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS | nil | false
+ ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS | ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS | true
+ ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS | nil | true
+ ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS | ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(default_project_creation: default_project_creation_access)
+ end
+
+      it "#{params[:is_visible] ? 'includes' : 'does not include'} group with developer role in list" do
+ group = create(:group, project_creation_level: project_creation_level)
+ group.add_user(user, :developer)
+
+ get :index
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).send(is_visible ? 'to' : 'not_to', include({
+ 'id' => group.id,
+ 'full_path' => group.full_path
+ }))
+ end
+ end
end
context "with an anonymous user" do
diff --git a/spec/controllers/import/manifest_controller_spec.rb b/spec/controllers/import/manifest_controller_spec.rb
index 6b21b45e698..d5a498e80d9 100644
--- a/spec/controllers/import/manifest_controller_spec.rb
+++ b/spec/controllers/import/manifest_controller_spec.rb
@@ -74,7 +74,6 @@ RSpec.describe Import::ManifestController, :clean_gitlab_redis_shared_state do
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo1[:id])
expect(json_response.dig("provider_repos", 1, "id")).to eq(repo2[:id])
- expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
it "does not show already added project" do
diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb
index fd7631edbbb..dc1fb0454df 100644
--- a/spec/controllers/invites_controller_spec.rb
+++ b/spec/controllers/invites_controller_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe InvitesController do
end
end
- describe 'GET #show' do
+ describe 'GET #show', :snowplow do
subject(:request) { get :show, params: params }
context 'when logged in' do
@@ -83,34 +83,75 @@ RSpec.describe InvitesController do
it_behaves_like 'invalid token'
end
- context 'when it is part of our invite email experiment' do
+ context 'when it is an initial invite email' do
let(:extra_params) { { invite_type: 'initial_email' } }
- it 'tracks the experiment' do
- experiment = double(track: true)
- allow(controller).to receive(:experiment).with('members/invite_email', actor: member).and_return(experiment)
-
+ it 'tracks the initial join click from email' do
request
- expect(experiment).to have_received(:track).with(:join_clicked)
+ expect_snowplow_event(
+ category: described_class.name,
+ action: 'join_clicked',
+ label: 'invite_email',
+ property: member.id.to_s
+ )
+ end
+
+ context 'when it is part of the invite_email_preview_text experiment' do
+ let(:extra_params) { { invite_type: 'initial_email', experiment_name: 'invite_email_preview_text' } }
+
+        it 'tracks the join click via the experiment' do
+ experiment = double(track: true)
+ allow(controller).to receive(:experiment).with(:invite_email_preview_text, actor: member).and_return(experiment)
+
+ request
+
+ expect(experiment).to have_received(:track).with(:join_clicked)
+ end
+
+ context 'when member does not exist' do
+ let(:raw_invite_token) { '_bogus_token_' }
+
+ it 'does not track the experiment' do
+ expect(controller).not_to receive(:experiment).with(:invite_email_preview_text, actor: member)
+
+ request
+ end
+ end
end
context 'when member does not exist' do
let(:raw_invite_token) { '_bogus_token_' }
- it 'does not track the experiment' do
- expect(controller).not_to receive(:experiment).with('members/invite_email', actor: member)
-
+ it 'does not track join click' do
request
+
+ expect_no_snowplow_event(
+ category: described_class.name,
+ action: 'join_clicked',
+ label: 'invite_email'
+ )
end
end
end
- context 'when it is not part of our invite email experiment' do
- it 'does not track via experiment' do
- expect(controller).not_to receive(:experiment).with('members/invite_email', actor: member)
-
+ context 'when it is not an initial email' do
+ it 'does not track the join click' do
request
+
+ expect_no_snowplow_event(
+ category: described_class.name,
+ action: 'join_clicked',
+ label: 'invite_email'
+ )
+ end
+
+ context 'when it is not part of our invite email experiment' do
+ it 'does not track via experiment' do
+ expect(controller).not_to receive(:experiment).with(:invite_email_preview_text, actor: member)
+
+ request
+ end
end
end
diff --git a/spec/controllers/jira_connect/app_descriptor_controller_spec.rb b/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
index 55bafa938a7..98f4db13a1d 100644
--- a/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
+++ b/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
@@ -4,20 +4,87 @@ require 'spec_helper'
RSpec.describe JiraConnect::AppDescriptorController do
describe '#show' do
+ let(:descriptor) do
+ json_response.deep_symbolize_keys
+ end
+
+ let(:logo_url) { %r{\Ahttp://test\.host/assets/gitlab_logo-\h+\.png\z} }
+
+ let(:common_module_properties) do
+ {
+ homeUrl: 'https://gitlab.com',
+ logoUrl: logo_url,
+ documentationUrl: 'https://docs.gitlab.com/ee/integration/jira/'
+ }
+ end
+
it 'returns JSON app descriptor' do
get :show
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to include(
- 'baseUrl' => 'https://test.host/-/jira_connect',
- 'lifecycle' => {
- 'installed' => '/events/installed',
- 'uninstalled' => '/events/uninstalled'
+
+ expect(descriptor).to include(
+ name: Atlassian::JiraConnect.app_name,
+ description: kind_of(String),
+ key: Atlassian::JiraConnect.app_key,
+ baseUrl: 'https://test.host/-/jira_connect',
+ lifecycle: {
+ installed: '/events/installed',
+ uninstalled: '/events/uninstalled'
+ },
+ vendor: {
+ name: 'GitLab',
+ url: 'https://gitlab.com'
},
- 'links' => {
- 'documentation' => 'http://test.host/help/integration/jira_development_panel#gitlabcom-1'
+ links: {
+ documentation: 'http://test.host/help/integration/jira_development_panel#gitlabcom-1'
+ },
+ authentication: {
+ type: 'jwt'
+ },
+ scopes: %w(READ WRITE DELETE),
+ apiVersion: 1,
+ apiMigrations: {
+ 'context-qsh': true,
+ gdpr: true
}
)
+
+ expect(descriptor[:modules]).to include(
+ postInstallPage: {
+ key: 'gitlab-configuration',
+ name: { value: 'GitLab Configuration' },
+ url: '/subscriptions'
+ },
+ jiraDevelopmentTool: {
+ actions: {
+ createBranch: {
+ templateUrl: 'http://test.host/-/jira_connect/branches/new?issue_key={issue.key}&issue_summary={issue.summary}'
+ }
+ },
+ key: 'gitlab-development-tool',
+ application: { value: 'GitLab' },
+ name: { value: 'GitLab' },
+ url: 'https://gitlab.com',
+ logoUrl: logo_url,
+ capabilities: %w(branch commit pull_request)
+ },
+ jiraBuildInfoProvider: common_module_properties.merge(
+ actions: {},
+ name: { value: 'GitLab CI' },
+ key: 'gitlab-ci'
+ ),
+ jiraDeploymentInfoProvider: common_module_properties.merge(
+ actions: {},
+ name: { value: 'GitLab Deployments' },
+ key: 'gitlab-deployments'
+ ),
+ jiraFeatureFlagInfoProvider: common_module_properties.merge(
+ actions: {},
+ name: { value: 'GitLab Feature Flags' },
+ key: 'gitlab-feature-flags'
+ )
+ )
end
end
end
diff --git a/spec/controllers/jira_connect/branches_controller_spec.rb b/spec/controllers/jira_connect/branches_controller_spec.rb
new file mode 100644
index 00000000000..45daf3b5309
--- /dev/null
+++ b/spec/controllers/jira_connect/branches_controller_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::BranchesController do
+ describe '#new' do
+ context 'when logged in' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'assigns the suggested branch name' do
+ get :new, params: { issue_key: 'ACME-123', issue_summary: 'My Issue !@#$%' }
+
+ expect(response).to be_successful
+ expect(assigns(:new_branch_data)).to include(
+ initial_branch_name: 'ACME-123-my-issue',
+ success_state_svg_path: start_with('/assets/illustrations/merge_requests-')
+ )
+ end
+
+ it 'ignores missing summary' do
+ get :new, params: { issue_key: 'ACME-123' }
+
+ expect(response).to be_successful
+ expect(assigns(:new_branch_data)).to include(initial_branch_name: 'ACME-123')
+ end
+
+ it 'does not set a branch name if key is not passed' do
+ get :new, params: { issue_summary: 'My issue' }
+
+ expect(response).to be_successful
+ expect(assigns(:new_branch_data)).to include('initial_branch_name': nil)
+ end
+ end
+
+ context 'when not logged in' do
+ it 'redirects to the login page' do
+ get :new
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/jira_connect/subscriptions_controller_spec.rb b/spec/controllers/jira_connect/subscriptions_controller_spec.rb
index 95b359a989a..e32915d55a1 100644
--- a/spec/controllers/jira_connect/subscriptions_controller_spec.rb
+++ b/spec/controllers/jira_connect/subscriptions_controller_spec.rb
@@ -7,9 +7,13 @@ RSpec.describe JiraConnect::SubscriptionsController do
describe '#index' do
before do
+ request.headers['Accept'] = content_type
+
get :index, params: { jwt: jwt }
end
+ let(:content_type) { 'text/html' }
+
context 'without JWT' do
let(:jwt) { nil }
@@ -29,13 +33,55 @@ RSpec.describe JiraConnect::SubscriptionsController do
it 'removes X-Frame-Options to allow rendering in iframe' do
expect(response.headers['X-Frame-Options']).to be_nil
end
+
+ context 'with JSON format' do
+ let_it_be(:subscription) { create(:jira_connect_subscription, installation: installation) }
+
+ let(:content_type) { 'application/json' }
+
+ it 'renders the relevant data as JSON', :aggregate_failures do
+ expect(json_response).to include('groups_path' => api_v4_groups_path(params: { min_access_level: Gitlab::Access::MAINTAINER, skip_groups: [subscription.namespace_id] }))
+ expect(json_response).to include(
+ 'subscriptions' => [
+ 'group' => {
+ 'name' => subscription.namespace.name,
+ 'avatar_url' => subscription.namespace.avatar_url,
+ 'full_name' => subscription.namespace.full_name,
+ 'description' => subscription.namespace.description
+ },
+ 'created_at' => subscription.created_at.iso8601(3),
+ 'unlink_path' => jira_connect_subscription_path(subscription)
+ ]
+ )
+ expect(json_response).to include('subscriptions_path' => jira_connect_subscriptions_path)
+ end
+
+ context 'when not signed in to GitLab' do
+ it 'contains a login path' do
+ expect(json_response).to include('login_path' => jira_connect_users_path)
+ end
+ end
+
+ context 'when signed in to GitLab' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+
+ get :index, params: { jwt: jwt }
+ end
+
+ it 'does not contain a login path' do
+ expect(json_response).to include('login_path' => nil)
+ end
+ end
+ end
end
end
describe '#create' do
let(:group) { create(:group) }
let(:user) { create(:user) }
- let(:current_user) { user }
before do
group.add_maintainer(user)
diff --git a/spec/controllers/profiles/notifications_controller_spec.rb b/spec/controllers/profiles/notifications_controller_spec.rb
index 1ebf4363ba6..36ec36fb6f1 100644
--- a/spec/controllers/profiles/notifications_controller_spec.rb
+++ b/spec/controllers/profiles/notifications_controller_spec.rb
@@ -162,7 +162,7 @@ RSpec.describe Profiles::NotificationsController do
it 'shows an error message if the params are invalid' do
sign_in(user)
- put :update, params: { user: { notification_email: '' } }
+ put :update, params: { user: { notification_email: 'unverified@example.com' } }
expect(user.reload.notification_email).to eq('original@example.com')
expect(controller).to set_flash[:alert].to('Failed to save new settings')
diff --git a/spec/controllers/profiles/two_factor_auths_controller_spec.rb b/spec/controllers/profiles/two_factor_auths_controller_spec.rb
index 59eb33f4bc6..818bf2a4ae6 100644
--- a/spec/controllers/profiles/two_factor_auths_controller_spec.rb
+++ b/spec/controllers/profiles/two_factor_auths_controller_spec.rb
@@ -70,6 +70,12 @@ RSpec.describe Profiles::TwoFactorAuthsController do
go
end
+ it 'dismisses the `ACCOUNT_RECOVERY_REGULAR_CHECK` callout' do
+ expect(controller.helpers).to receive(:dismiss_account_recovery_regular_check)
+
+ go
+ end
+
it 'renders create' do
go
expect(response).to render_template(:create)
@@ -117,6 +123,12 @@ RSpec.describe Profiles::TwoFactorAuthsController do
user.reload
expect(user.otp_backup_codes).not_to be_empty
end
+
+ it 'dismisses the `ACCOUNT_RECOVERY_REGULAR_CHECK` callout' do
+ expect(controller.helpers).to receive(:dismiss_account_recovery_regular_check)
+
+ post :codes
+ end
end
describe 'DELETE destroy' do
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index 37a633afab4..b4019643baf 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -110,6 +110,17 @@ RSpec.describe ProfilesController, :request_store do
expect(user.reload.pronouns).to eq(pronouns)
expect(response).to have_gitlab_http_status(:found)
end
+
+ it 'allows updating user specified pronunciation', :aggregate_failures do
+ user = create(:user, name: 'Example')
+ pronunciation = 'uhg-zaam-pl'
+ sign_in(user)
+
+ put :update, params: { user: { pronunciation: pronunciation } }
+
+ expect(user.reload.pronunciation).to eq(pronunciation)
+ expect(response).to have_gitlab_http_status(:found)
+ end
end
describe 'GET audit_log' do
diff --git a/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb b/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb
index 3bb841c7c9f..1351ba35a71 100644
--- a/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb
+++ b/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb
@@ -7,26 +7,58 @@ RSpec.describe Projects::Analytics::CycleAnalytics::StagesController do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
- let(:params) { { namespace_id: group, project_id: project, value_stream_id: 'default' } }
+ let(:params) do
+ {
+ namespace_id: group,
+ project_id: project,
+ value_stream_id: Analytics::CycleAnalytics::Stages::BaseService::DEFAULT_VALUE_STREAM_NAME
+ }
+ end
before do
sign_in(user)
end
- describe 'GET index' do
- context 'when user is member of the project' do
+ shared_examples 'project-level value stream analytics endpoint' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'succeeds' do
+ get action, params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ shared_examples 'project-level value stream analytics request error examples' do
+ context 'when invalid value stream id is given' do
before do
- project.add_developer(user)
+ params[:value_stream_id] = 1
end
- it 'succeeds' do
- get :index, params: params
+ it 'renders 404' do
+ get action, params: params
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:not_found)
end
+ end
+ context 'when user is not member of the project' do
+ it 'renders 404' do
+ get action, params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'GET index' do
+ let(:action) { :index }
+
+ it_behaves_like 'project-level value stream analytics endpoint' do
it 'exposes the default stages' do
- get :index, params: params
+ get action, params: params
expect(json_response['stages'].size).to eq(Gitlab::Analytics::CycleAnalytics::DefaultStages.all.size)
end
@@ -37,31 +69,109 @@ RSpec.describe Projects::Analytics::CycleAnalytics::StagesController do
expect(list_service).to receive(:allowed?).and_return(false)
end
- get :index, params: params
+ get action, params: params
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
- context 'when invalid value stream id is given' do
- before do
- params[:value_stream_id] = 1
+ it_behaves_like 'project-level value stream analytics request error examples'
+ end
+
+ describe 'GET median' do
+ let(:action) { :median }
+
+ before do
+ params[:id] = 'issue'
+ end
+
+ it_behaves_like 'project-level value stream analytics endpoint' do
+ it 'returns the median' do
+ result = 2
+
+ expect_next_instance_of(Gitlab::Analytics::CycleAnalytics::Median) do |instance|
+ expect(instance).to receive(:seconds).and_return(result)
+ end
+
+ get action, params: params
+
+ expect(json_response['value']).to eq(result)
end
+ end
- it 'renders 404' do
- get :index, params: params
+ it_behaves_like 'project-level value stream analytics request error examples'
+ end
- expect(response).to have_gitlab_http_status(:not_found)
+ describe 'GET average' do
+ let(:action) { :average }
+
+ before do
+ params[:id] = 'issue'
+ end
+
+ it_behaves_like 'project-level value stream analytics endpoint' do
+ it 'returns the average' do
+ result = 2
+
+ expect_next_instance_of(Gitlab::Analytics::CycleAnalytics::Average) do |instance|
+ expect(instance).to receive(:seconds).and_return(result)
+ end
+
+ get action, params: params
+
+ expect(json_response['value']).to eq(result)
end
end
- context 'when user is not member of the project' do
- it 'renders 404' do
- get :index, params: params
+ it_behaves_like 'project-level value stream analytics request error examples'
+ end
- expect(response).to have_gitlab_http_status(:not_found)
+ describe 'GET count' do
+ let(:action) { :count }
+
+ before do
+ params[:id] = 'issue'
+ end
+
+ it_behaves_like 'project-level value stream analytics endpoint' do
+ it 'returns the count' do
+ count = 2
+
+ expect_next_instance_of(Gitlab::Analytics::CycleAnalytics::DataCollector) do |instance|
+ expect(instance).to receive(:count).and_return(count)
+ end
+
+ get action, params: params
+
+ expect(json_response['count']).to eq(count)
end
end
+
+ it_behaves_like 'project-level value stream analytics request error examples'
+ end
+
+ describe 'GET records' do
+ let(:action) { :records }
+
+ before do
+ params[:id] = 'issue'
+ end
+
+ it_behaves_like 'project-level value stream analytics endpoint' do
+ it 'returns the records' do
+ result = Issue.none.page(1)
+
+ expect_next_instance_of(Gitlab::Analytics::CycleAnalytics::RecordsFetcher) do |instance|
+ expect(instance).to receive(:serialized_records).and_yield(result).and_return([])
+ end
+
+ get action, params: params
+
+ expect(json_response).to eq([])
+ end
+ end
+
+ it_behaves_like 'project-level value stream analytics request error examples'
end
end
diff --git a/spec/controllers/projects/clusters/applications_controller_spec.rb b/spec/controllers/projects/clusters/applications_controller_spec.rb
deleted file mode 100644
index cc6170252c1..00000000000
--- a/spec/controllers/projects/clusters/applications_controller_spec.rb
+++ /dev/null
@@ -1,215 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Clusters::ApplicationsController do
- include AccessMatchersForController
-
- def current_application
- Clusters::Cluster::APPLICATIONS[application]
- end
-
- shared_examples 'a secure endpoint' do
- it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
- expect { subject }.to be_allowed_for(:admin)
- end
- it 'is denied for admin when admin mode disabled' do
- expect { subject }.to be_denied_for(:admin)
- end
- it { expect { subject }.to be_allowed_for(:owner).of(project) }
- it { expect { subject }.to be_allowed_for(:maintainer).of(project) }
- it { expect { subject }.to be_denied_for(:developer).of(project) }
- it { expect { subject }.to be_denied_for(:reporter).of(project) }
- it { expect { subject }.to be_denied_for(:guest).of(project) }
- it { expect { subject }.to be_denied_for(:user) }
- it { expect { subject }.to be_denied_for(:external) }
- end
-
- describe 'POST create' do
- subject do
- post :create, params: params.merge(namespace_id: project.namespace, project_id: project)
- end
-
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
- let(:application) { 'ingress' }
- let(:params) { { application: application, id: cluster.id } }
-
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- it 'schedule an application installation' do
- expect(ClusterInstallAppWorker).to receive(:perform_async).with(application, anything).once
-
- expect { subject }.to change { current_application.count }
- expect(response).to have_gitlab_http_status(:no_content)
- expect(cluster.application_ingress).to be_scheduled
- end
-
- context 'when cluster do not exists' do
- before do
- cluster.destroy!
- end
-
- it 'return 404' do
- expect { subject }.not_to change { current_application.count }
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when application is unknown' do
- let(:application) { 'unkwnown-app' }
-
- it 'return 404' do
- is_expected.to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when application is already installing' do
- before do
- create(:clusters_applications_ingress, :installing, cluster: cluster)
- end
-
- it 'returns 400' do
- is_expected.to have_gitlab_http_status(:bad_request)
- end
- end
- end
-
- describe 'security' do
- before do
- allow(ClusterInstallAppWorker).to receive(:perform_async)
- end
-
- it_behaves_like 'a secure endpoint'
- end
- end
-
- describe 'PATCH update' do
- subject do
- patch :update, params: params.merge(namespace_id: project.namespace, project_id: project)
- end
-
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
- let!(:application) { create(:clusters_applications_knative, :installed, cluster: cluster) }
- let(:application_name) { application.name }
- let(:params) { { application: application_name, id: cluster.id, hostname: "new.example.com" } }
-
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- context "when cluster and app exists" do
- it "schedules an application update" do
- expect(ClusterPatchAppWorker).to receive(:perform_async).with(application.name, anything).once
-
- is_expected.to have_gitlab_http_status(:no_content)
-
- expect(cluster.application_knative).to be_scheduled
- end
- end
-
- context 'when cluster do not exists' do
- before do
- cluster.destroy!
- end
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when application is unknown' do
- let(:application_name) { 'unkwnown-app' }
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when application is already scheduled' do
- before do
- application.make_scheduled!
- end
-
- it { is_expected.to have_gitlab_http_status(:bad_request) }
- end
- end
-
- describe 'security' do
- before do
- allow(ClusterPatchAppWorker).to receive(:perform_async)
- end
-
- it_behaves_like 'a secure endpoint'
- end
- end
-
- describe 'DELETE destroy' do
- subject do
- delete :destroy, params: params.merge(namespace_id: project.namespace, project_id: project)
- end
-
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
- let!(:application) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
- let(:application_name) { application.name }
- let(:params) { { application: application_name, id: cluster.id } }
- let(:worker_class) { Clusters::Applications::UninstallWorker }
-
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- context "when cluster and app exists" do
- it "schedules an application update" do
- expect(worker_class).to receive(:perform_async).with(application.name, application.id).once
-
- is_expected.to have_gitlab_http_status(:no_content)
-
- expect(cluster.application_prometheus).to be_scheduled
- end
- end
-
- context 'when cluster do not exists' do
- before do
- cluster.destroy!
- end
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when application is unknown' do
- let(:application_name) { 'unkwnown-app' }
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when application is already scheduled' do
- before do
- application.make_scheduled!
- end
-
- it { is_expected.to have_gitlab_http_status(:bad_request) }
- end
- end
-
- describe 'security' do
- before do
- allow(worker_class).to receive(:perform_async)
- end
-
- it_behaves_like 'a secure endpoint'
- end
- end
-end
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index 4cb90edb742..7103d7df5c5 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -200,11 +200,27 @@ RSpec.describe Projects::EnvironmentsController do
end
describe 'PATCH #update' do
- it 'responds with a 302' do
- patch_params = environment_params.merge(environment: { external_url: 'https://git.gitlab.com' })
- patch :update, params: patch_params
+ subject { patch :update, params: params }
- expect(response).to have_gitlab_http_status(:found)
+ context "when environment params are valid" do
+ let(:params) { environment_params.merge(environment: { external_url: 'https://git.gitlab.com' }) }
+
+      it 'returns ok and the path to the updated environment' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['path']).to eq("/#{project.full_path}/-/environments/#{environment.id}")
+ end
+ end
+
+ context "when environment params are invalid" do
+ let(:params) { environment_params.merge(environment: { name: '/foo/', external_url: '/git.gitlab.com' }) }
+
+ it 'returns bad request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
end
@@ -786,6 +802,31 @@ RSpec.describe Projects::EnvironmentsController do
end
end
+ describe 'POST #create' do
+ subject { post :create, params: params }
+
+ context "when environment params are valid" do
+ let(:params) { { namespace_id: project.namespace, project_id: project, environment: { name: 'foo', external_url: 'https://foo.example.com' } } }
+
+ it 'returns ok and the path to the newly created environment' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['path']).to eq("/#{project.full_path}/-/environments/#{json_response['environment']['id']}")
+ end
+ end
+
+ context "when environment params are invalid" do
+ let(:params) { { namespace_id: project.namespace, project_id: project, environment: { name: 'foo/', external_url: '/foo.example.com' } } }
+
+ it 'returns bad request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+
def environment_params(opts = {})
opts.reverse_merge(namespace_id: project.namespace,
project_id: project,
diff --git a/spec/controllers/projects/error_tracking/stack_traces_controller_spec.rb b/spec/controllers/projects/error_tracking/stack_traces_controller_spec.rb
index 7c080504c31..19b6b597a84 100644
--- a/spec/controllers/projects/error_tracking/stack_traces_controller_spec.rb
+++ b/spec/controllers/projects/error_tracking/stack_traces_controller_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Projects::ErrorTracking::StackTracesController do
context 'service result is successful' do
let(:service_response) { { status: :success, latest_event: error_event } }
- let(:error_event) { build(:error_tracking_error_event) }
+ let(:error_event) { build(:error_tracking_sentry_error_event) }
it 'responds with success' do
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/controllers/projects/error_tracking_controller_spec.rb b/spec/controllers/projects/error_tracking_controller_spec.rb
index 5ea885e4fd6..822778779eb 100644
--- a/spec/controllers/projects/error_tracking_controller_spec.rb
+++ b/spec/controllers/projects/error_tracking_controller_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe Projects::ErrorTrackingController do
get :index, params: params
end
- let(:error) { build(:error_tracking_error) }
+ let(:error) { build(:error_tracking_sentry_error) }
it 'returns a list of errors' do
expect(response).to have_gitlab_http_status(:ok)
@@ -126,7 +126,7 @@ RSpec.describe Projects::ErrorTrackingController do
.and_return(external_url)
end
- let(:error) { build(:error_tracking_error) }
+ let(:error) { build(:error_tracking_sentry_error) }
it 'returns a list of errors' do
get :index, params: project_params(format: :json)
@@ -221,7 +221,7 @@ RSpec.describe Projects::ErrorTrackingController do
get :details, params: issue_params(issue_id: issue_id, format: :json)
end
- let(:error) { build(:detailed_error_tracking_error) }
+ let(:error) { build(:error_tracking_sentry_detailed_error) }
it 'returns an error' do
expected_error = error.as_json.except('first_release_version').merge(
diff --git a/spec/controllers/projects/feature_flags_controller_spec.rb b/spec/controllers/projects/feature_flags_controller_spec.rb
index f809dd31b3b..e038b247eff 100644
--- a/spec/controllers/projects/feature_flags_controller_spec.rb
+++ b/spec/controllers/projects/feature_flags_controller_spec.rb
@@ -652,7 +652,7 @@ RSpec.describe Projects::FeatureFlagsController do
version: 'new_version_flag',
strategies_attributes: [{
name: 'flexibleRollout',
- parameters: { groupId: 'default', rollout: '15', stickiness: 'DEFAULT' },
+ parameters: { groupId: 'default', rollout: '15', stickiness: 'default' },
scopes_attributes: [{ environment_scope: 'production' }]
}]
}
@@ -666,7 +666,7 @@ RSpec.describe Projects::FeatureFlagsController do
strategy_json = json_response['strategies'].first
expect(strategy_json['name']).to eq('flexibleRollout')
- expect(strategy_json['parameters']).to eq({ 'groupId' => 'default', 'rollout' => '15', 'stickiness' => 'DEFAULT' })
+ expect(strategy_json['parameters']).to eq({ 'groupId' => 'default', 'rollout' => '15', 'stickiness' => 'default' })
expect(strategy_json['scopes'].count).to eq(1)
scope_json = strategy_json['scopes'].first
@@ -938,7 +938,7 @@ RSpec.describe Projects::FeatureFlagsController do
it 'creates a flexibleRollout strategy' do
put_request(new_version_flag, strategies_attributes: [{
name: 'flexibleRollout',
- parameters: { groupId: 'default', rollout: '30', stickiness: 'DEFAULT' }
+ parameters: { groupId: 'default', rollout: '30', stickiness: 'default' }
}])
expect(response).to have_gitlab_http_status(:ok)
@@ -948,7 +948,7 @@ RSpec.describe Projects::FeatureFlagsController do
expect(strategy_json['parameters']).to eq({
'groupId' => 'default',
'rollout' => '30',
- 'stickiness' => 'DEFAULT'
+ 'stickiness' => 'default'
})
expect(strategy_json['scopes']).to eq([])
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 922ecb6052a..0c29280316a 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1464,7 +1464,7 @@ RSpec.describe Projects::IssuesController do
}
end
- it 'updates issue' do
+ it 'updates issue', :enable_admin_mode do
post_spam
expect(issue.submittable_as_spam?).to be_falsey
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index a7a36d3a074..e9e7c3c3bb3 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -868,64 +868,85 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
describe 'POST cancel' do
- before do
- project.add_developer(user)
- sign_in(user)
- end
+ context 'when user is authorized to cancel the build' do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
- context 'when continue url is present' do
- let(:job) { create(:ci_build, :cancelable, pipeline: pipeline) }
+ context 'when continue url is present' do
+ let(:job) { create(:ci_build, :cancelable, pipeline: pipeline) }
- context 'when continue to is a safe url' do
- let(:url) { '/test' }
+ context 'when continue to is a safe url' do
+ let(:url) { '/test' }
- before do
- post_cancel(continue: { to: url })
- end
+ before do
+ post_cancel(continue: { to: url })
+ end
- it 'redirects to the continue url' do
- expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to(url)
+ it 'redirects to the continue url' do
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(url)
+ end
+
+ it 'transits to canceled' do
+ expect(job.reload).to be_canceled
+ end
end
- it 'transits to canceled' do
- expect(job.reload).to be_canceled
+ context 'when continue to is not a safe url' do
+ let(:url) { 'http://example.com' }
+
+ it 'raises an error' do
+ expect { cancel_with_redirect(url) }.to raise_error
+ end
end
end
- context 'when continue to is not a safe url' do
- let(:url) { 'http://example.com' }
+ context 'when continue url is not present' do
+ before do
+ post_cancel
+ end
+
+ context 'when job is cancelable' do
+ let(:job) { create(:ci_build, :cancelable, pipeline: pipeline) }
+
+ it 'redirects to the builds page' do
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(builds_namespace_project_pipeline_path(id: pipeline.id))
+ end
+
+ it 'transits to canceled' do
+ expect(job.reload).to be_canceled
+ end
+ end
+
+ context 'when job is not cancelable' do
+ let(:job) { create(:ci_build, :canceled, pipeline: pipeline) }
- it 'raises an error' do
- expect { cancel_with_redirect(url) }.to raise_error
+ it 'returns unprocessable_entity' do
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
end
end
end
- context 'when continue url is not present' do
+ context 'when user is not authorized to cancel the build' do
+ let!(:job) { create(:ci_build, :cancelable, pipeline: pipeline) }
+
before do
+ project.add_reporter(user)
+ sign_in(user)
+
post_cancel
end
- context 'when job is cancelable' do
- let(:job) { create(:ci_build, :cancelable, pipeline: pipeline) }
-
- it 'redirects to the builds page' do
- expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to(builds_namespace_project_pipeline_path(id: pipeline.id))
- end
-
- it 'transits to canceled' do
- expect(job.reload).to be_canceled
- end
+ it 'responds with not_found' do
+ expect(response).to have_gitlab_http_status(:not_found)
end
- context 'when job is not cancelable' do
- let(:job) { create(:ci_build, :canceled, pipeline: pipeline) }
-
- it 'returns unprocessable_entity' do
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
+ it 'does not transit to canceled' do
+ expect(job.reload).not_to be_canceled
end
end
@@ -938,43 +959,60 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
describe 'POST unschedule' do
before do
- project.add_developer(user)
+ create(:protected_branch, :developers_can_merge, name: 'master', project: project)
+ end
- create(:protected_branch, :developers_can_merge,
- name: 'master', project: project)
+ context 'when user is authorized to unschedule the build' do
+ before do
+ project.add_developer(user)
+ sign_in(user)
- sign_in(user)
+ post_unschedule
+ end
- post_unschedule
- end
+ context 'when job is scheduled' do
+ let(:job) { create(:ci_build, :scheduled, pipeline: pipeline) }
- context 'when job is scheduled' do
- let(:job) { create(:ci_build, :scheduled, pipeline: pipeline) }
+ it 'redirects to the unscheduled job page' do
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(namespace_project_job_path(id: job.id))
+ end
- it 'redirects to the unscheduled job page' do
- expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to(namespace_project_job_path(id: job.id))
+ it 'transits to manual' do
+ expect(job.reload).to be_manual
+ end
end
- it 'transits to manual' do
- expect(job.reload).to be_manual
+ context 'when job is not scheduled' do
+ let(:job) { create(:ci_build, pipeline: pipeline) }
+
+ it 'renders unprocessable_entity' do
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
end
end
- context 'when job is not scheduled' do
- let(:job) { create(:ci_build, pipeline: pipeline) }
+ context 'when user is not authorized to unschedule the build' do
+ let(:job) { create(:ci_build, :scheduled, pipeline: pipeline) }
- it 'renders unprocessable_entity' do
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ before do
+ project.add_reporter(user)
+ sign_in(user)
+
+ post_unschedule
+ end
+
+ it 'responds with not_found' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'does not transit to scheduled' do
+ expect(job.reload).not_to be_manual
end
end
def post_unschedule
- post :unschedule, params: {
- namespace_id: project.namespace,
- project_id: project,
- id: job.id
- }
+ post :unschedule, params: { namespace_id: project.namespace, project_id: project, id: job.id }
end
end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index 707d074b5c1..3d7636b1f30 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Projects::MergeRequests::DiffsController do
include ProjectForksHelper
+ include TrackingHelpers
shared_examples '404 for unexistent diffable' do
context 'when diffable does not exists' do
@@ -141,6 +142,24 @@ RSpec.describe Projects::MergeRequests::DiffsController do
end
describe 'GET diffs_metadata' do
+ shared_examples_for 'serializes diffs metadata with expected arguments' do
+ it 'returns success' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'serializes paginated merge request diff collection' do
+ expect_next_instance_of(DiffsMetadataSerializer) do |instance|
+ expect(instance).to receive(:represent)
+ .with(an_instance_of(collection), expected_options)
+ .and_call_original
+ end
+
+ subject
+ end
+ end
+
def go(extra_params = {})
params = {
namespace_id: project.namespace.to_param,
@@ -179,32 +198,25 @@ RSpec.describe Projects::MergeRequests::DiffsController do
end
context 'with valid diff_id' do
- it 'returns success' do
- go(diff_id: merge_request.merge_request_diff.id)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'serializes diffs metadata with expected arguments' do
- expected_options = {
- environment: nil,
- merge_request: merge_request,
- merge_request_diff: merge_request.merge_request_diff,
- merge_request_diffs: merge_request.merge_request_diffs,
- start_version: nil,
- start_sha: nil,
- commit: nil,
- latest_diff: true,
- only_context_commits: false
- }
+ subject { go(diff_id: merge_request.merge_request_diff.id) }
- expect_next_instance_of(DiffsMetadataSerializer) do |instance|
- expect(instance).to receive(:represent)
- .with(an_instance_of(Gitlab::Diff::FileCollection::MergeRequestDiff), expected_options)
- .and_call_original
+ it_behaves_like 'serializes diffs metadata with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiff }
+ let(:expected_options) do
+ {
+ environment: nil,
+ merge_request: merge_request,
+ merge_request_diff: merge_request.merge_request_diff,
+ merge_request_diffs: merge_request.merge_request_diffs,
+ start_version: nil,
+ start_sha: nil,
+ commit: nil,
+ latest_diff: true,
+ only_context_commits: false,
+ allow_tree_conflicts: true,
+ merge_ref_head_diff: false
+ }
end
-
- go(diff_id: merge_request.merge_request_diff.id)
end
end
@@ -261,62 +273,75 @@ RSpec.describe Projects::MergeRequests::DiffsController do
end
context 'with MR regular diff params' do
- it 'returns success' do
- go
+ subject { go }
- expect(response).to have_gitlab_http_status(:ok)
+ it_behaves_like 'serializes diffs metadata with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiff }
+ let(:expected_options) do
+ {
+ environment: nil,
+ merge_request: merge_request,
+ merge_request_diff: merge_request.merge_request_diff,
+ merge_request_diffs: merge_request.merge_request_diffs,
+ start_version: nil,
+ start_sha: nil,
+ commit: nil,
+ latest_diff: true,
+ only_context_commits: false,
+ allow_tree_conflicts: true,
+ merge_ref_head_diff: nil
+ }
+ end
end
+ end
- it 'serializes diffs metadata with expected arguments' do
- expected_options = {
- environment: nil,
- merge_request: merge_request,
- merge_request_diff: merge_request.merge_request_diff,
- merge_request_diffs: merge_request.merge_request_diffs,
- start_version: nil,
- start_sha: nil,
- commit: nil,
- latest_diff: true,
- only_context_commits: false
- }
+ context 'with commit param' do
+ subject { go(commit_id: merge_request.diff_head_sha) }
- expect_next_instance_of(DiffsMetadataSerializer) do |instance|
- expect(instance).to receive(:represent)
- .with(an_instance_of(Gitlab::Diff::FileCollection::MergeRequestDiff), expected_options)
- .and_call_original
+ it_behaves_like 'serializes diffs metadata with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::Commit }
+ let(:expected_options) do
+ {
+ environment: nil,
+ merge_request: merge_request,
+ merge_request_diff: nil,
+ merge_request_diffs: merge_request.merge_request_diffs,
+ start_version: nil,
+ start_sha: nil,
+ commit: merge_request.diff_head_commit,
+ latest_diff: nil,
+ only_context_commits: false,
+ allow_tree_conflicts: true,
+ merge_ref_head_diff: nil
+ }
end
-
- go
end
end
- context 'with commit param' do
- it 'returns success' do
- go(commit_id: merge_request.diff_head_sha)
+ context 'when display_merge_conflicts_in_diff is disabled' do
+ subject { go }
- expect(response).to have_gitlab_http_status(:ok)
+ before do
+ stub_feature_flags(display_merge_conflicts_in_diff: false)
end
- it 'serializes diffs metadata with expected arguments' do
- expected_options = {
- environment: nil,
- merge_request: merge_request,
- merge_request_diff: nil,
- merge_request_diffs: merge_request.merge_request_diffs,
- start_version: nil,
- start_sha: nil,
- commit: merge_request.diff_head_commit,
- latest_diff: nil,
- only_context_commits: false
- }
-
- expect_next_instance_of(DiffsMetadataSerializer) do |instance|
- expect(instance).to receive(:represent)
- .with(an_instance_of(Gitlab::Diff::FileCollection::Commit), expected_options)
- .and_call_original
+ it_behaves_like 'serializes diffs metadata with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiff }
+ let(:expected_options) do
+ {
+ environment: nil,
+ merge_request: merge_request,
+ merge_request_diff: merge_request.merge_request_diff,
+ merge_request_diffs: merge_request.merge_request_diffs,
+ start_version: nil,
+ start_sha: nil,
+ commit: nil,
+ latest_diff: true,
+ only_context_commits: false,
+ allow_tree_conflicts: false,
+ merge_ref_head_diff: nil
+ }
end
-
- go(commit_id: merge_request.diff_head_sha)
end
end
end
@@ -423,7 +448,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do
context 'when DNT is enabled' do
before do
- request.headers['DNT'] = '1'
+ stub_do_not_track('1')
end
it 'does not track any mr_diffs event' do
@@ -471,6 +496,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do
merge_request: merge_request,
diff_view: :inline,
merge_ref_head_diff: nil,
+ allow_tree_conflicts: true,
pagination_data: {
total_pages: nil
}.merge(pagination_data)
@@ -589,6 +615,21 @@ RSpec.describe Projects::MergeRequests::DiffsController do
it_behaves_like 'successful request'
end
+ context 'when display_merge_conflicts_in_diff is disabled' do
+ before do
+ stub_feature_flags(display_merge_conflicts_in_diff: false)
+ end
+
+ subject { go }
+
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20).merge(allow_tree_conflicts: false) }
+ end
+
+ it_behaves_like 'successful request'
+ end
+
it_behaves_like 'forked project with submodules'
it_behaves_like 'cached diff collection'
diff --git a/spec/controllers/projects/packages/infrastructure_registry_controller_spec.rb b/spec/controllers/projects/packages/infrastructure_registry_controller_spec.rb
index dc5a022eb7b..fc741d0f3f6 100644
--- a/spec/controllers/projects/packages/infrastructure_registry_controller_spec.rb
+++ b/spec/controllers/projects/packages/infrastructure_registry_controller_spec.rb
@@ -18,9 +18,9 @@ RSpec.describe Projects::Packages::InfrastructureRegistryController do
it_behaves_like 'returning response status', :ok
- context 'when the feature is disabled' do
+ context 'when the packages registry is not available' do
before do
- stub_feature_flags(infrastructure_registry_page: false)
+ stub_config(packages: { enabled: false })
end
it_behaves_like 'returning response status', :not_found
@@ -34,9 +34,9 @@ RSpec.describe Projects::Packages::InfrastructureRegistryController do
it_behaves_like 'returning response status', :ok
- context 'when the feature is disabled' do
+ context 'when the packages registry is not available' do
before do
- stub_feature_flags(infrastructure_registry_page: false)
+ stub_config(packages: { enabled: false })
end
it_behaves_like 'returning response status', :not_found
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index 9ed43a251a2..be5c1f0d428 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -93,13 +93,12 @@ RSpec.describe Projects::ProjectMembersController do
let_it_be(:invited_member) { create(:project_member, :invited, project: project) }
before do
- project.add_maintainer(user)
sign_in(user)
end
context 'when user has `admin_project_member` permissions' do
before do
- allow(controller.helpers).to receive(:can_manage_project_members?).with(project).and_return(true)
+ project.add_maintainer(user)
end
it 'lists invited members' do
@@ -110,10 +109,6 @@ RSpec.describe Projects::ProjectMembersController do
end
context 'when user does not have `admin_project_member` permissions' do
- before do
- allow(controller.helpers).to receive(:can_manage_project_members?).with(project).and_return(false)
- end
-
it 'does not list invited members' do
get :index, params: { namespace_id: project.namespace, project_id: project }
@@ -127,13 +122,12 @@ RSpec.describe Projects::ProjectMembersController do
before do
project.request_access(access_requester_user)
- project.add_maintainer(user)
sign_in(user)
end
context 'when user has `admin_project_member` permissions' do
before do
- allow(controller.helpers).to receive(:can_manage_project_members?).with(project).and_return(true)
+ project.add_maintainer(user)
end
it 'lists access requests' do
@@ -144,10 +138,6 @@ RSpec.describe Projects::ProjectMembersController do
end
context 'when user does not have `admin_project_member` permissions' do
- before do
- allow(controller.helpers).to receive(:can_manage_project_members?).with(project).and_return(false)
- end
-
it 'does not list access requests' do
get :index, params: { namespace_id: project.namespace, project_id: project }
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index 5dee36ee7c2..2c25c7e20ea 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -33,15 +33,25 @@ RSpec.describe Projects::RawController do
end
context 'regular filename' do
- let(:filepath) { 'master/README.md' }
+ let(:filepath) { 'master/CONTRIBUTING.md' }
it 'delivers ASCII file' do
+ allow(Gitlab::Workhorse).to receive(:send_git_blob).and_call_original
+
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq 'true'
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
+
+ expect(Gitlab::Workhorse).to have_received(:send_git_blob) do |repository, blob|
+ expected_blob = project.repository.blob_at('master', 'CONTRIBUTING.md')
+
+ expect(repository).to eq(project.repository)
+ expect(blob.id).to eq(expected_blob.id)
+ expect(blob).to be_truncated
+ end
end
it_behaves_like 'project cache control headers'
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index baf3bde83bd..419b5c7e101 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -174,6 +174,8 @@ RSpec.describe Projects::ServicesController do
let(:redirect_url) { edit_project_service_path(project, integration) }
before do
+ stub_jira_integration_test
+
put :update, params: params
end
@@ -222,12 +224,48 @@ RSpec.describe Projects::ServicesController do
end
end
- context 'when param `inherit_from_id` is set to some value' do
- let(:instance_service) { create(:jira_integration, :instance) }
- let(:integration_params) { { inherit_from_id: instance_service.id } }
+ context 'when param `inherit_from_id` is set to an instance integration' do
+ let(:instance_integration) { create(:jira_integration, :instance, url: 'http://instance.com', password: 'instance') }
+ let(:integration_params) { { inherit_from_id: instance_integration.id, url: 'http://custom.com', password: 'custom' } }
+
+ it 'ignores submitted params and inherits instance settings' do
+ expect(integration.reload).to have_attributes(
+ inherit_from_id: instance_integration.id,
+ url: instance_integration.url,
+ password: instance_integration.password
+ )
+ end
+ end
+
+ context 'when param `inherit_from_id` is set to a group integration' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
- it 'sets inherit_from_id to value' do
- expect(integration.reload.inherit_from_id).to eq(instance_service.id)
+ let(:group_integration) { create(:jira_integration, group: group, project: nil, url: 'http://group.com', password: 'group') }
+ let(:integration_params) { { inherit_from_id: group_integration.id, url: 'http://custom.com', password: 'custom' } }
+
+ it 'ignores submitted params and inherits group settings' do
+ expect(integration.reload).to have_attributes(
+ inherit_from_id: group_integration.id,
+ url: group_integration.url,
+ password: group_integration.password
+ )
+ end
+ end
+
+ context 'when param `inherit_from_id` is set to an unrelated group' do
+ let_it_be(:group) { create(:group) }
+
+ let(:group_integration) { create(:jira_integration, group: group, project: nil, url: 'http://group.com', password: 'group') }
+ let(:integration_params) { { inherit_from_id: group_integration.id, url: 'http://custom.com', password: 'custom' } }
+
+ it 'ignores the param and saves the submitted settings' do
+ expect(integration.reload).to have_attributes(
+ inherit_from_id: nil,
+ url: 'http://custom.com',
+ password: 'custom'
+ )
end
end
end
@@ -239,22 +277,39 @@ RSpec.describe Projects::ServicesController do
end
context 'when update succeeds' do
- let(:integration_params) { { url: 'http://example.com' } }
+ let(:integration_params) { { url: 'http://example.com', password: 'password' } }
- it 'returns JSON response with no errors' do
+ it 'returns success response' do
expect(response).to be_successful
- expect(json_response).to include('active' => true, 'errors' => {})
+ expect(json_response).to include(
+ 'active' => true,
+ 'errors' => {}
+ )
+ end
+ end
+
+ context 'when update fails with missing password' do
+ let(:integration_params) { { url: 'http://example.com' } }
+
+ it 'returns JSON response errors' do
+ expect(response).not_to be_successful
+ expect(json_response).to include(
+ 'active' => true,
+ 'errors' => {
+ 'password' => ["can't be blank"]
+ }
+ )
end
end
- context 'when update fails' do
- let(:integration_params) { { url: '' } }
+ context 'when update fails with invalid URL' do
+ let(:integration_params) { { url: '', password: 'password' } }
it 'returns JSON response with errors' do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response).to include(
'active' => true,
- 'errors' => { 'url' => ['must be a valid URL', %(can't be blank)] }
+ 'errors' => { 'url' => ['must be a valid URL', "can't be blank"] }
)
end
end
diff --git a/spec/controllers/projects/snippets_controller_spec.rb b/spec/controllers/projects/snippets_controller_spec.rb
index 1a6c0974f08..a388fc4620f 100644
--- a/spec/controllers/projects/snippets_controller_spec.rb
+++ b/spec/controllers/projects/snippets_controller_spec.rb
@@ -110,7 +110,7 @@ RSpec.describe Projects::SnippetsController do
}
end
- it 'updates the snippet' do
+ it 'updates the snippet', :enable_admin_mode do
mark_as_spam
expect(snippet.reload).not_to be_submittable_as_spam
@@ -181,6 +181,24 @@ RSpec.describe Projects::SnippetsController do
end
end
end
+
+ context 'when the project snippet is public' do
+ let_it_be(:project_snippet_public) { create(:project_snippet, :public, :repository, project: project, author: user) }
+
+ context 'when attempting to access from a different project route' do
+ subject { get action, params: { namespace_id: project.namespace, project_id: 42, id: project_snippet_public.to_param } }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'responds with status 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 46c17d6a6fe..8afb80d9cc5 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -435,32 +435,6 @@ RSpec.describe ProjectsController do
end
end
- describe 'POST create' do
- let!(:project_params) do
- {
- path: 'foo',
- description: 'bar',
- namespace_id: user.namespace.id,
- visibility_level: Gitlab::VisibilityLevel::PUBLIC,
- initialize_with_readme: 1
- }
- end
-
- before do
- sign_in(user)
- end
-
- it 'tracks a created event for the new_project_readme experiment', :experiment do
- expect(experiment(:new_project_readme)).to track(
- :created,
- property: 'blank',
- value: 1
- ).with_context(actor: user).on_next_instance
-
- post :create, params: { project: project_params }
- end
- end
-
describe 'POST #archive' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
@@ -793,8 +767,7 @@ RSpec.describe ProjectsController do
id: project.path,
project: {
project_setting_attributes: {
- show_default_award_emojis: boolean_value,
- allow_editing_commit_messages: boolean_value
+ show_default_award_emojis: boolean_value
}
}
}
@@ -802,7 +775,33 @@ RSpec.describe ProjectsController do
project.reload
expect(project.show_default_award_emojis?).to eq(result)
- expect(project.allow_editing_commit_messages?).to eq(result)
+ end
+ end
+ end
+
+ context 'with project feature attributes' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:feature, :initial_value, :update_to) do
+ :metrics_dashboard_access_level | ProjectFeature::PRIVATE | ProjectFeature::ENABLED
+ :container_registry_access_level | ProjectFeature::ENABLED | ProjectFeature::PRIVATE
+ end
+
+ with_them do
+ it "updates the project_feature new" do
+ params = {
+ namespace_id: project.namespace,
+ id: project.path,
+ project: {
+ project_feature_attributes: {
+ "#{feature}": update_to
+ }
+ }
+ }
+
+ expect { put :update, params: params }.to change {
+ project.reload.project_feature.public_send(feature)
+ }.from(initial_value).to(update_to)
end
end
end
diff --git a/spec/controllers/registrations/welcome_controller_spec.rb b/spec/controllers/registrations/welcome_controller_spec.rb
index 6d34b56df09..034c9b3d1c0 100644
--- a/spec/controllers/registrations/welcome_controller_spec.rb
+++ b/spec/controllers/registrations/welcome_controller_spec.rb
@@ -60,10 +60,8 @@ RSpec.describe Registrations::WelcomeController do
end
describe '#update' do
- let(:email_opted_in) { '0' }
-
subject(:update) do
- patch :update, params: { user: { role: 'software_developer', setup_for_company: 'false', email_opted_in: email_opted_in } }
+ patch :update, params: { user: { role: 'software_developer', setup_for_company: 'false' } }
end
context 'without a signed in user' do
@@ -100,24 +98,6 @@ RSpec.describe Registrations::WelcomeController do
end
end
end
-
- context 'when the user opted in' do
- let(:email_opted_in) { '1' }
-
- it 'sets the email_opted_in field' do
- subject
-
- expect(controller.current_user.email_opted_in).to eq(true)
- end
- end
-
- context 'when the user opted out' do
- it 'sets the email_opted_in field' do
- subject
-
- expect(controller.current_user.email_opted_in).to eq(false)
- end
- end
end
end
end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 72aa9038c3e..301c60e89c8 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -155,34 +155,76 @@ RSpec.describe RegistrationsController do
end
context 'when registration is triggered from an accepted invite' do
- context 'when it is part of our invite email experiment', :experiment do
+ context 'when it is part from the initial invite email', :snowplow do
let_it_be(:member) { create(:project_member, :invited, invite_email: user_params.dig(:user, :email)) }
let(:originating_member_id) { member.id }
+ let(:extra_session_params) { {} }
let(:session_params) do
{
invite_email: user_params.dig(:user, :email),
originating_member_id: originating_member_id
- }
+ }.merge extra_session_params
end
context 'when member exists from the session key value' do
- it 'tracks the experiment' do
- expect(experiment('members/invite_email')).to track(:accepted)
- .with_context(actor: member)
- .on_next_instance
-
+ it 'tracks the invite acceptance' do
subject
+
+ expect_snowplow_event(
+ category: 'RegistrationsController',
+ action: 'accepted',
+ label: 'invite_email',
+ property: member.id.to_s
+ )
end
end
context 'when member does not exist from the session key value' do
let(:originating_member_id) { -1 }
- it 'tracks the experiment' do
- expect(experiment('members/invite_email')).not_to track(:accepted)
-
+ it 'does not track invite acceptance' do
subject
+
+ expect_no_snowplow_event(
+ category: 'RegistrationsController',
+ action: 'accepted',
+ label: 'invite_email'
+ )
+ end
+ end
+
+ context 'with the invite_email_preview_text experiment', :experiment do
+ let(:extra_session_params) { { invite_email_experiment_name: 'invite_email_preview_text' } }
+
+ context 'when member and invite_email_experiment_name exists from the session key value' do
+ it 'tracks the invite acceptance' do
+ expect(experiment(:invite_email_preview_text)).to track(:accepted)
+ .with_context(actor: member)
+ .on_next_instance
+
+ subject
+ end
+ end
+
+ context 'when member does not exist from the session key value' do
+ let(:originating_member_id) { -1 }
+
+ it 'does not track invite acceptance' do
+ expect(experiment(:invite_email_preview_text)).not_to track(:accepted)
+
+ subject
+ end
+ end
+
+ context 'when invite_email_experiment_name does not exist from the session key value' do
+ let(:extra_session_params) { {} }
+
+ it 'does not track invite acceptance' do
+ expect(experiment(:invite_email_preview_text)).not_to track(:accepted)
+
+ subject
+ end
end
end
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 3a2986f6cbe..e0870e17d99 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -53,6 +53,20 @@ RSpec.describe SearchController do
end
end
+ shared_examples_for 'support for active record query timeouts' do |action, params, method_to_stub, format|
+ before do
+ allow_next_instance_of(SearchService) do |service|
+ allow(service).to receive(method_to_stub).and_raise(ActiveRecord::QueryCanceled)
+ end
+ end
+
+ it 'renders a 408 when a timeout occurs' do
+ get action, params: params, format: format
+
+ expect(response).to have_gitlab_http_status(:request_timeout)
+ end
+ end
+
describe 'GET #show' do
it_behaves_like 'when the user cannot read cross project', :show, { search: 'hello' } do
it 'still allows accessing the search page' do
@@ -63,6 +77,7 @@ RSpec.describe SearchController do
end
it_behaves_like 'with external authorization service enabled', :show, { search: 'hello' }
+ it_behaves_like 'support for active record query timeouts', :show, { search: 'hello' }, :search_objects, :html
context 'uses the right partials depending on scope' do
using RSpec::Parameterized::TableSyntax
@@ -230,6 +245,7 @@ RSpec.describe SearchController do
describe 'GET #count' do
it_behaves_like 'when the user cannot read cross project', :count, { search: 'hello', scope: 'projects' }
it_behaves_like 'with external authorization service enabled', :count, { search: 'hello', scope: 'projects' }
+ it_behaves_like 'support for active record query timeouts', :count, { search: 'hello', scope: 'projects' }, :search_results, :json
it 'returns the result count for the given term and scope' do
create(:project, :public, name: 'hello world')
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index 50d6ac8f23d..a82c44fcc44 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -231,7 +231,7 @@ RSpec.describe SnippetsController do
post :mark_as_spam, params: { id: public_snippet.id }
end
- it 'updates the snippet' do
+ it 'updates the snippet', :enable_admin_mode do
mark_as_spam
expect(public_snippet.reload).not_to be_submittable_as_spam
diff --git a/spec/crystalball_env.rb b/spec/crystalball_env.rb
index a7748cd6627..d606fe69cdf 100644
--- a/spec/crystalball_env.rb
+++ b/spec/crystalball_env.rb
@@ -6,7 +6,7 @@ module CrystalballEnv
extend self
def start!
- return unless ENV['CRYSTALBALL'] && ENV['CI_PIPELINE_SOURCE'] == 'schedule' && ENV['FREQUENCY'] == '2-hourly'
+ return unless ENV['CRYSTALBALL']
require 'crystalball'
require_relative '../tooling/lib/tooling/crystalball/coverage_lines_execution_detector'
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 18f2f7b54c4..7e4b8c53885 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -86,7 +86,8 @@ RSpec.describe 'Database schema' do
users: %w[color_scheme_id created_by_id theme_id email_opted_in_source_id],
users_star_projects: %w[user_id],
vulnerability_identifiers: %w[external_id],
- vulnerability_scanners: %w[external_id]
+ vulnerability_scanners: %w[external_id],
+ security_scans: %w[pipeline_id] # foreign key is not added as the ci_pipeline table will be moved into a different database soon
}.with_indifferent_access.freeze
context 'for table' do
diff --git a/spec/deprecation_toolkit_env.rb b/spec/deprecation_toolkit_env.rb
index 8e06dcb8c75..b95a8c599bf 100644
--- a/spec/deprecation_toolkit_env.rb
+++ b/spec/deprecation_toolkit_env.rb
@@ -47,16 +47,18 @@ module DeprecationToolkitEnv
end
# Taken from https://github.com/jeremyevans/ruby-warning/blob/1.1.0/lib/warning.rb#L18
+ # Note: When a spec fails due to this warning, please update the spec to address the deprecation.
def self.kwargs_warning
%r{warning: (?:Using the last argument (?:for `.+' )?as keyword parameters is deprecated; maybe \*\* should be added to the call|Passing the keyword argument (?:for `.+' )?as the last hash parameter is deprecated|Splitting the last argument (?:for `.+' )?into positional and keyword parameters is deprecated|The called method (?:`.+' )?is defined here)\n\z}
end
- # Allow these Gem paths to trigger keyword warnings as we upgrade these gems
- # one by one
+ # Note: No new exceptions should be added here, unless they are in external dependencies.
+ # In this case, we recommend to add a silence together with an issue to patch or update
+ # the dependency causing the problem.
+ # See https://gitlab.com/gitlab-org/gitlab/-/commit/aea37f506bbe036378998916d374966c031bf347#note_647515736
def self.allowed_kwarg_warning_paths
%w[
- asciidoctor-2.0.12/lib/asciidoctor/extensions.rb
- gitlab-labkit-0.20.0/lib/labkit/correlation/grpc/client_interceptor.rb
+ actionpack-6.1.3.2/lib/action_dispatch/routing/route_set.rb
]
end
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index 9c03910cf66..b0788eec808 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -57,24 +57,23 @@ RSpec.describe ApplicationExperiment, :experiment do
end
describe "#publish" do
- it "doesn't track or publish to the client or database if we can't track", :snowplow do
- allow(subject).to receive(:should_track?).and_return(false)
-
- expect(subject).not_to receive(:publish_to_client)
- expect(subject).not_to receive(:publish_to_database)
-
- subject.publish
+ let(:should_track) { true }
- expect_no_snowplow_event
+ before do
+ allow(subject).to receive(:should_track?).and_return(should_track)
end
- it "tracks the assignment" do
- expect(subject).to receive(:track).with(:assignment)
-
+ it "tracks the assignment", :snowplow do
subject.publish
+
+ expect_snowplow_event(
+ category: 'namespaced/stub',
+ action: 'assignment',
+ context: [{ schema: anything, data: anything }]
+ )
end
- it "publishes the to the client" do
+ it "publishes to the client" do
expect(subject).to receive(:publish_to_client)
subject.publish
@@ -88,6 +87,16 @@ RSpec.describe ApplicationExperiment, :experiment do
subject.publish
end
+ context 'when we should not track' do
+ let(:should_track) { false }
+
+ it 'does not track an event to Snowplow', :snowplow do
+ subject.publish
+
+ expect_no_snowplow_event
+ end
+ end
+
describe "#publish_to_client" do
it "adds the data into Gon" do
signature = { key: '86208ac54ca798e11f127e8b23ec396a', variant: 'control' }
@@ -101,17 +110,34 @@ RSpec.describe ApplicationExperiment, :experiment do
expect { subject.publish_to_client }.not_to raise_error
end
+
+ context 'when we should not track' do
+ let(:should_track) { false }
+
+ it 'returns early' do
+ expect(Gon).not_to receive(:push)
+
+ subject.publish_to_client
+ end
+ end
end
- describe "#publish_to_database" do
+ describe '#publish_to_database' do
using RSpec::Parameterized::TableSyntax
- let(:context) { { context_key => context_value }}
- before do
- subject.record!
+ shared_examples 'does not record to the database' do
+ it 'does not create an experiment record' do
+ expect { subject.publish_to_database }.not_to change(Experiment, :count)
+ end
+
+ it 'does not create an experiment subject record' do
+ expect { subject.publish_to_database }.not_to change(ExperimentSubject, :count)
+ end
end
- context "when there's a usable subject" do
+ context 'when there is a usable subject' do
+ let(:context) { { context_key => context_value } }
+
where(:context_key, :context_value, :object_type) do
:namespace | build(:namespace) | :namespace
:group | build(:namespace) | :namespace
@@ -121,7 +147,7 @@ RSpec.describe ApplicationExperiment, :experiment do
end
with_them do
- it "creates an experiment and experiment subject record" do
+ it 'creates an experiment and experiment subject record' do
expect { subject.publish_to_database }.to change(Experiment, :count).by(1)
expect(Experiment.last.name).to eq('namespaced/stub')
@@ -130,22 +156,24 @@ RSpec.describe ApplicationExperiment, :experiment do
end
end
- context "when there's not a usable subject" do
+ context 'when there is not a usable subject' do
+ let(:context) { { context_key => context_value } }
+
where(:context_key, :context_value) do
:namespace | nil
:foo | :bar
end
with_them do
- it "doesn't create an experiment record" do
- expect { subject.publish_to_database }.not_to change(Experiment, :count)
- end
-
- it "doesn't create an experiment subject record" do
- expect { subject.publish_to_database }.not_to change(ExperimentSubject, :count)
- end
+ include_examples 'does not record to the database'
end
end
+
+ context 'but we should not track' do
+ let(:should_track) { false }
+
+ include_examples 'does not record to the database'
+ end
end
end
@@ -209,6 +237,40 @@ RSpec.describe ApplicationExperiment, :experiment do
end
end
+ describe "#process_redirect_url" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:url, :processed_url) do
+ 'https://about.gitlab.com/' | 'https://about.gitlab.com/'
+ 'https://gitlab.com/' | 'https://gitlab.com/'
+ 'http://docs.gitlab.com' | 'http://docs.gitlab.com'
+ 'https://docs.gitlab.com/some/path?foo=bar' | 'https://docs.gitlab.com/some/path?foo=bar'
+ 'http://badgitlab.com' | nil
+ 'https://gitlab.com.nefarious.net' | nil
+ 'https://unknown.gitlab.com' | nil
+ "https://badplace.com\nhttps://gitlab.com" | nil
+ 'https://gitlabbcom' | nil
+ 'https://gitlabbcom/' | nil
+ end
+
+ with_them do
+ it "returns the url or nil if invalid" do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ expect(subject.process_redirect_url(url)).to eq(processed_url)
+ end
+
+ it "considers all urls invalid when not on dev or com" do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ expect(subject.process_redirect_url(url)).to be_nil
+ end
+ end
+
+ it "generates the correct urls based on where the engine was mounted" do
+ url = Rails.application.routes.url_helpers.experiment_redirect_url(subject, url: 'https://docs.gitlab.com')
+ expect(url).to include("/-/experiment/namespaced%2Fstub:#{subject.context.key}?https://docs.gitlab.com")
+ end
+ end
+
context "when resolving variants" do
it "uses the default value as specified in the yaml" do
expect(Feature).to receive(:enabled?).with('namespaced_stub', subject, type: :experiment, default_enabled: :yaml)
diff --git a/spec/experiments/force_company_trial_experiment_spec.rb b/spec/experiments/force_company_trial_experiment_spec.rb
new file mode 100644
index 00000000000..42a3245771a
--- /dev/null
+++ b/spec/experiments/force_company_trial_experiment_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ForceCompanyTrialExperiment, :experiment do
+ subject { described_class.new(current_user: user) }
+
+ let(:user) { create(:user, setup_for_company: setup_for_company) }
+ let(:setup_for_company) { true }
+
+ context 'when a user is setup_for_company' do
+ it 'is not excluded' do
+ expect(subject).not_to exclude(user: user)
+ end
+ end
+
+ context 'when a user is not setup_for_company' do
+ let(:setup_for_company) { nil }
+
+ it 'is excluded' do
+ expect(subject).to exclude(user: user)
+ end
+ end
+end
diff --git a/spec/experiments/members/invite_email_experiment_spec.rb b/spec/experiments/members/invite_email_experiment_spec.rb
deleted file mode 100644
index 47ae6e529a1..00000000000
--- a/spec/experiments/members/invite_email_experiment_spec.rb
+++ /dev/null
@@ -1,117 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Members::InviteEmailExperiment, :clean_gitlab_redis_shared_state do
- subject(:invite_email) { experiment('members/invite_email', **context) }
-
- let(:context) { { actor: double('Member', created_by: double('User', avatar_url: '_avatar_url_')) } }
-
- before do
- allow(invite_email).to receive(:enabled?).and_return(true)
- end
-
- describe ".initial_invite_email?" do
- it "is an initial invite email" do
- expect(described_class.initial_invite_email?('initial_email')).to be(true)
- end
-
- it "is not an initial invite email" do
- expect(described_class.initial_invite_email?('_bogus_')).to be(false)
- end
- end
-
- describe "exclusions", :experiment do
- it "excludes when created by is nil" do
- expect(experiment('members/invite_email')).to exclude(actor: double(created_by: nil))
- end
-
- it "excludes when avatar_url is nil" do
- member_without_avatar_url = double('Member', created_by: double('User', avatar_url: nil))
-
- expect(experiment('members/invite_email')).to exclude(actor: member_without_avatar_url)
- end
- end
-
- describe "variant resolution" do
- it "proves out round robin in variant selection", :aggregate_failures do
- instance_1 = described_class.new('members/invite_email', **context)
- allow(instance_1).to receive(:enabled?).and_return(true)
- instance_2 = described_class.new('members/invite_email', **context)
- allow(instance_2).to receive(:enabled?).and_return(true)
-
- instance_1.try { }
-
- expect(instance_1.variant.name).to eq('control')
-
- instance_2.try { }
-
- expect(instance_2.variant.name).to eq('activity')
- end
- end
-
- describe Members::RoundRobin do
- subject(:round_robin) { Members::RoundRobin.new('_key_', %i[variant1 variant2]) }
-
- describe "execute" do
- context "when there are 2 variants" do
- it "proves out round robin in selection", :aggregate_failures do
- expect(round_robin.execute).to eq :variant2
- expect(round_robin.execute).to eq :variant1
- expect(round_robin.execute).to eq :variant2
- end
- end
-
- context "when there are more than 2 variants" do
- subject(:round_robin) { Members::RoundRobin.new('_key_', %i[variant1 variant2 variant3]) }
-
- it "proves out round robin in selection", :aggregate_failures do
- expect(round_robin.execute).to eq :variant2
- expect(round_robin.execute).to eq :variant3
- expect(round_robin.execute).to eq :variant1
-
- expect(round_robin.execute).to eq :variant2
- expect(round_robin.execute).to eq :variant3
- expect(round_robin.execute).to eq :variant1
- end
- end
-
- context "when writing to cache fails" do
- subject(:round_robin) { Members::RoundRobin.new('_key_', []) }
-
- it "raises an error and logs" do
- allow(Gitlab::Redis::SharedState).to receive(:with).and_raise(Members::RoundRobin::CacheError)
- expect(Gitlab::AppLogger).to receive(:warn)
-
- expect { round_robin.execute }.to raise_error(Members::RoundRobin::CacheError)
- end
- end
- end
-
- describe "#counter_expires_in" do
- it 'displays the expiration time in seconds' do
- round_robin.execute
-
- expect(round_robin.counter_expires_in).to be_between(0, described_class::COUNTER_EXPIRE_TIME)
- end
- end
-
- describe '#value' do
- it 'get the count' do
- expect(round_robin.counter_value).to eq(0)
-
- round_robin.execute
-
- expect(round_robin.counter_value).to eq(1)
- end
- end
-
- describe '#reset!' do
- it 'resets the count down to zero' do
- 3.times { round_robin.execute }
-
- expect { round_robin.reset! }.to change { round_robin.counter_value }.from(3).to(0)
- end
- end
- end
-end
diff --git a/spec/experiments/new_project_readme_content_experiment_spec.rb b/spec/experiments/new_project_readme_content_experiment_spec.rb
index 92a883078df..a6a81580a29 100644
--- a/spec/experiments/new_project_readme_content_experiment_spec.rb
+++ b/spec/experiments/new_project_readme_content_experiment_spec.rb
@@ -30,7 +30,9 @@ RSpec.describe NewProjectReadmeContentExperiment, :experiment do
end
it "renders redirect URLs" do
- expect(markdown).to include(Rails.application.routes.url_helpers.experiment_redirect_url(subject, initial_url))
+ url = Rails.application.routes.url_helpers.experiment_redirect_url(subject, url: initial_url)
+ expect(url).to include("/-/experiment/#{subject.to_param}?")
+ expect(markdown).to include(url)
end
end
end
diff --git a/spec/experiments/new_project_readme_experiment_spec.rb b/spec/experiments/new_project_readme_experiment_spec.rb
deleted file mode 100644
index e5ecc4662f6..00000000000
--- a/spec/experiments/new_project_readme_experiment_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe NewProjectReadmeExperiment, :experiment do
- subject { described_class.new(actor: actor) }
-
- let(:actor) { User.new(id: 42, created_at: Time.current) }
-
- describe "exclusions" do
- let(:threshold) { described_class::MAX_ACCOUNT_AGE }
-
- it { is_expected.to exclude(actor: User.new(created_at: (threshold + 1.minute).ago)) }
- it { is_expected.not_to exclude(actor: User.new(created_at: (threshold - 1.minute).ago)) }
- end
-
- describe "the control behavior" do
- subject { described_class.new(actor: actor).run(:control) }
-
- it { is_expected.to be false }
- end
-
- describe "the candidate behavior" do
- subject { described_class.new(actor: actor).run(:candidate) }
-
- it { is_expected.to be true }
- end
-
- context "when tracking initial writes" do
- let!(:project) { create(:project, :repository) }
-
- before do
- stub_experiments(new_project_readme: :control)
- end
-
- it "tracks an event for the first commit on a project with a repository" do
- expect(subject).to receive(:commit_count_for).with(project, default_count: described_class::INITIAL_WRITE_LIMIT, max_count: described_class::INITIAL_WRITE_LIMIT, experiment: 'new_project_readme').and_return(1)
- expect(subject).to receive(:track).with(:write, property: project.created_at.to_s, value: 1).and_call_original
-
- subject.track_initial_writes(project)
- end
-
- it "tracks an event for the second commit on a project with a repository" do
- allow(subject).to receive(:commit_count_for).and_return(2)
-
- expect(subject).to receive(:track).with(:write, property: project.created_at.to_s, value: 2).and_call_original
-
- subject.track_initial_writes(project)
- end
-
- it "doesn't track if the repository has more then 2 commits" do
- allow(subject).to receive(:commit_count_for).and_return(3)
-
- expect(subject).not_to receive(:track)
-
- subject.track_initial_writes(project)
- end
-
- it "doesn't track when we generally shouldn't" do
- allow(subject).to receive(:should_track?).and_return(false)
-
- expect(subject).not_to receive(:track)
-
- subject.track_initial_writes(project)
- end
-
- it "doesn't track if the project is older" do
- expect(project).to receive(:created_at).and_return(described_class::EXPERIMENT_START_DATE - 1.minute)
-
- expect(subject).not_to receive(:track)
-
- subject.track_initial_writes(project)
- end
- end
-end
diff --git a/spec/factories/alert_management/alerts.rb b/spec/factories/alert_management/alerts.rb
index f63a3c9f7f5..589a62a68bb 100644
--- a/spec/factories/alert_management/alerts.rb
+++ b/spec/factories/alert_management/alerts.rb
@@ -15,9 +15,9 @@ FactoryBot.define do
end
end
- trait :with_issue do
+ trait :with_incident do
after(:create) do |alert|
- create(:issue, alert_management_alert: alert, project: alert.project)
+ create(:incident, alert_management_alert: alert, project: alert.project)
end
end
@@ -128,7 +128,7 @@ FactoryBot.define do
end
trait :all_fields do
- with_issue
+ with_incident
with_assignee
with_fingerprint
with_service
diff --git a/spec/factories/ci/build_trace_section_names.rb b/spec/factories/ci/build_trace_section_names.rb
deleted file mode 100644
index b9b66b49317..00000000000
--- a/spec/factories/ci/build_trace_section_names.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :ci_build_trace_section_name, class: 'Ci::BuildTraceSectionName' do
- sequence(:name) { |n| "section_#{n}" }
- project factory: :project
- end
-end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 0e535aeaa8d..f3500301e22 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -120,6 +120,19 @@ FactoryBot.define do
end
end
+ trait :environment_with_deployment_tier do
+ environment { 'test_portal' }
+ options do
+ {
+ script: %w(ls),
+ environment: { name: 'test_portal',
+ action: 'start',
+ url: 'http://staging.example.com/$CI_JOB_NAME',
+ deployment_tier: 'testing' }
+ }
+ end
+ end
+
trait :deploy_to_production do
environment { 'production' }
@@ -224,8 +237,13 @@ FactoryBot.define do
# to the job. If `build.deployment` has already been set, it doesn't
# build a new instance.
environment = Gitlab::Ci::Pipeline::Seed::Environment.new(build).to_resource
- build.deployment =
- Gitlab::Ci::Pipeline::Seed::Deployment.new(build, environment).to_resource
+
+ build.assign_attributes(
+ deployment: Gitlab::Ci::Pipeline::Seed::Deployment.new(build, environment).to_resource,
+ metadata_attributes: {
+ expanded_environment_name: environment.name
+ }
+ )
end
end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 642437b1119..2f4eb99a073 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -287,6 +287,76 @@ FactoryBot.define do
end
end
+ trait :common_security_report do
+ file_format { :raw }
+ file_type { :dependency_scanning }
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/security_reports/master/gl-common-scanning-report.json'), 'application/json')
+ end
+ end
+
+ trait :common_security_report_with_blank_names do
+ file_format { :raw }
+ file_type { :dependency_scanning }
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/security_reports/master/gl-common-scanning-report-names.json'), 'application/json')
+ end
+ end
+
+ trait :sast_deprecated do
+ file_type { :sast }
+ file_format { :raw }
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/security_reports/deprecated/gl-sast-report.json'), 'application/json')
+ end
+ end
+
+ trait :sast_with_corrupted_data do
+ file_type { :sast }
+ file_format { :raw }
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/trace/sample_trace'), 'application/json')
+ end
+ end
+
+ trait :sast_feature_branch do
+ file_format { :raw }
+ file_type { :sast }
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/security_reports/feature-branch/gl-sast-report.json'), 'application/json')
+ end
+ end
+
+ trait :secret_detection_feature_branch do
+ file_format { :raw }
+ file_type { :secret_detection }
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json'), 'application/json')
+ end
+ end
+
+ trait :sast_with_missing_scanner do
+ file_type { :sast }
+ file_format { :raw }
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/security_reports/master/gl-sast-missing-scanner.json'), 'application/json')
+ end
+ end
+
trait :secret_detection do
file_type { :secret_detection }
file_format { :raw }
diff --git a/spec/factories/ci/pending_builds.rb b/spec/factories/ci/pending_builds.rb
index 90779ae8ab9..fbd76e07d8e 100644
--- a/spec/factories/ci/pending_builds.rb
+++ b/spec/factories/ci/pending_builds.rb
@@ -6,5 +6,7 @@ FactoryBot.define do
project
protected { build.protected }
instance_runners_enabled { true }
+ namespace { project.namespace }
+ minutes_exceeded { false }
end
end
diff --git a/spec/factories/ci/reports/security/aggregated_reports.rb b/spec/factories/ci/reports/security/aggregated_reports.rb
new file mode 100644
index 00000000000..eb678dc9766
--- /dev/null
+++ b/spec/factories/ci/reports/security/aggregated_reports.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_security_aggregated_reports, class: '::Gitlab::Ci::Reports::Security::AggregatedReport' do
+ reports { FactoryBot.build_list(:ci_reports_security_report, 1) }
+ findings { FactoryBot.build_list(:ci_reports_security_finding, 1) }
+
+ skip_create
+
+ initialize_with do
+ ::Gitlab::Ci::Reports::Security::AggregatedReport.new(reports, findings)
+ end
+ end
+end
diff --git a/spec/factories/ci/reports/security/finding_keys.rb b/spec/factories/ci/reports/security/finding_keys.rb
new file mode 100644
index 00000000000..f00a043012e
--- /dev/null
+++ b/spec/factories/ci/reports/security/finding_keys.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_security_finding_key, class: '::Gitlab::Ci::Reports::Security::FindingKey' do
+ sequence :location_fingerprint do |a|
+ Digest::SHA1.hexdigest(a.to_s)
+ end
+ sequence :identifier_fingerprint do |a|
+ Digest::SHA1.hexdigest(a.to_s)
+ end
+
+ skip_create
+
+ initialize_with do
+ ::Gitlab::Ci::Reports::Security::FindingKey.new(**attributes)
+ end
+ end
+end
diff --git a/spec/factories/ci/reports/security/findings.rb b/spec/factories/ci/reports/security/findings.rb
new file mode 100644
index 00000000000..e3971bc48f3
--- /dev/null
+++ b/spec/factories/ci/reports/security/findings.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_security_finding, class: '::Gitlab::Ci::Reports::Security::Finding' do
+ compare_key { "#{identifiers.first&.external_type}:#{identifiers.first&.external_id}:#{location.fingerprint}" }
+ confidence { :medium }
+ identifiers { Array.new(1) { association(:ci_reports_security_identifier) } }
+ location factory: :ci_reports_security_locations_sast
+ metadata_version { 'sast:1.0' }
+ name { 'Cipher with no integrity' }
+ report_type { :sast }
+ raw_metadata do
+ {
+ description: "The cipher does not provide data integrity update 1",
+ solution: "GCM mode introduces an HMAC into the resulting encrypted data, providing integrity of the result.",
+ location: {
+ file: "maven/src/main/java/com/gitlab/security_products/tests/App.java",
+ start_line: 29,
+ end_line: 29,
+ class: "com.gitlab.security_products.tests.App",
+ method: "insecureCypher"
+ },
+ links: [
+ {
+ name: "Cipher does not check for integrity first?",
+ url: "https://crypto.stackexchange.com/questions/31428/pbewithmd5anddes-cipher-does-not-check-for-integrity-first"
+ }
+ ]
+ }.to_json
+ end
+ scanner factory: :ci_reports_security_scanner
+ severity { :high }
+ scan factory: :ci_reports_security_scan
+ sequence(:uuid) do |n|
+ ::Security::VulnerabilityUUID.generate(
+ report_type: report_type,
+ primary_identifier_fingerprint: identifiers.first&.fingerprint,
+ location_fingerprint: location.fingerprint,
+ project_id: n
+ )
+ end
+ vulnerability_finding_signatures_enabled { false }
+
+ skip_create
+
+ trait :dynamic do
+ location { association(:ci_reports_security_locations_sast, :dynamic) }
+ end
+
+ initialize_with do
+ ::Gitlab::Ci::Reports::Security::Finding.new(**attributes)
+ end
+ end
+end
diff --git a/spec/factories/ci/reports/security/locations/sast.rb b/spec/factories/ci/reports/security/locations/sast.rb
new file mode 100644
index 00000000000..59b54ecd8f2
--- /dev/null
+++ b/spec/factories/ci/reports/security/locations/sast.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_security_locations_sast, class: '::Gitlab::Ci::Reports::Security::Locations::Sast' do
+ file_path { 'maven/src/main/java/com/gitlab/security_products/tests/App.java' }
+ start_line { 29 }
+ end_line { 31 }
+ class_name { 'com.gitlab.security_products.tests.App' }
+ method_name { 'insecureCypher' }
+
+ skip_create
+
+ initialize_with do
+ ::Gitlab::Ci::Reports::Security::Locations::Sast.new(**attributes)
+ end
+
+ trait :dynamic do
+ sequence(:file_path, 'a') { |n| "path/#{n}" }
+ start_line { Random.rand(20) }
+ end_line { start_line + Random.rand(5) }
+ end
+ end
+end
diff --git a/spec/factories/ci/reports/security/reports.rb b/spec/factories/ci/reports/security/reports.rb
new file mode 100644
index 00000000000..5699b8fee3e
--- /dev/null
+++ b/spec/factories/ci/reports/security/reports.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_security_report, class: '::Gitlab::Ci::Reports::Security::Report' do
+ type { :sast }
+ pipeline { association(:ci_pipeline) }
+ created_at { 2.weeks.ago }
+ scanned_resources { [] }
+
+ transient do
+ findings { [] }
+ scanners { [] }
+ identifiers { [] }
+ end
+
+ after :build do |report, evaluator|
+ evaluator.scanners.each { |s| report.add_scanner(s) }
+ evaluator.identifiers.each { |id| report.add_identifier(id) }
+ evaluator.findings.each { |o| report.add_finding(o) }
+ end
+
+ skip_create
+
+ initialize_with do
+ ::Gitlab::Ci::Reports::Security::Report.new(type, pipeline, created_at)
+ end
+ end
+end
diff --git a/spec/factories/customer_relations/organizations.rb b/spec/factories/customer_relations/organizations.rb
new file mode 100644
index 00000000000..b6efd46f1a4
--- /dev/null
+++ b/spec/factories/customer_relations/organizations.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :organization, class: 'CustomerRelations::Organization' do
+ group
+
+ name { generate(:name) }
+ end
+end
diff --git a/spec/factories/deploy_tokens.rb b/spec/factories/deploy_tokens.rb
index d4127f78ebf..b2c478fd3fe 100644
--- a/spec/factories/deploy_tokens.rb
+++ b/spec/factories/deploy_tokens.rb
@@ -35,9 +35,13 @@ FactoryBot.define do
end
trait :all_scopes do
- write_registry { true}
+ write_registry { true }
read_package_registry { true }
write_package_registry { true }
end
+
+ trait :dependency_proxy_scopes do
+ write_registry { true }
+ end
end
end
diff --git a/spec/factories/design_management/designs.rb b/spec/factories/design_management/designs.rb
index c4fb330a0da..c23a67fe95b 100644
--- a/spec/factories/design_management/designs.rb
+++ b/spec/factories/design_management/designs.rb
@@ -39,7 +39,7 @@ FactoryBot.define do
sha = commit_version[action]
version = DesignManagement::Version.new(sha: sha, issue: issue, author: evaluator.author)
version.save!(validate: false) # We need it to have an ID, validate later
- Gitlab::Database.bulk_insert(dv_table_name, [action.row_attrs(version)]) # rubocop:disable Gitlab/BulkInsert
+ Gitlab::Database.main.bulk_insert(dv_table_name, [action.row_attrs(version)]) # rubocop:disable Gitlab/BulkInsert
end
# always a creation
diff --git a/spec/factories/environments.rb b/spec/factories/environments.rb
index 148ee64fb08..0a9255e1abe 100644
--- a/spec/factories/environments.rb
+++ b/spec/factories/environments.rb
@@ -75,6 +75,11 @@ FactoryBot.define do
auto_stop_at { 1.day.ago }
end
+ trait :auto_deletable do
+ state { :stopped }
+ auto_delete_at { 1.day.ago }
+ end
+
trait :will_auto_stop do
auto_stop_at { 1.day.from_now }
end
diff --git a/spec/factories/error_tracking/client_key.rb b/spec/factories/error_tracking/client_key.rb
new file mode 100644
index 00000000000..eb5acc8dab7
--- /dev/null
+++ b/spec/factories/error_tracking/client_key.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :error_tracking_client_key, class: 'ErrorTracking::ClientKey' do
+ project
+ active { true }
+
+ trait :disabled do
+ active { false }
+ end
+ end
+end
diff --git a/spec/factories/error_tracking/detailed_error.rb b/spec/factories/error_tracking/detailed_error.rb
index 83004ffae38..c2e5741b150 100644
--- a/spec/factories/error_tracking/detailed_error.rb
+++ b/spec/factories/error_tracking/detailed_error.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :detailed_error_tracking_error, parent: :error_tracking_error, class: 'Gitlab::ErrorTracking::DetailedError' do
+ factory :error_tracking_sentry_detailed_error, parent: :error_tracking_sentry_error, class: 'Gitlab::ErrorTracking::DetailedError' do
gitlab_issue { 'http://gitlab.example.com/issues/1' }
external_base_url { 'http://example.com' }
first_release_last_commit { '68c914da9' }
diff --git a/spec/factories/error_tracking/error.rb b/spec/factories/error_tracking/error.rb
index e5f2e2ca9a7..bebdffb3614 100644
--- a/spec/factories/error_tracking/error.rb
+++ b/spec/factories/error_tracking/error.rb
@@ -1,7 +1,8 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :error_tracking_error, class: 'Gitlab::ErrorTracking::Error' do
+ # There is an issue to rename this class https://gitlab.com/gitlab-org/gitlab/-/issues/323342.
+ factory :error_tracking_sentry_error, class: 'Gitlab::ErrorTracking::Error' do
id { '1' }
title { 'title' }
type { 'error' }
@@ -25,4 +26,19 @@ FactoryBot.define do
skip_create
end
+
+ factory :error_tracking_error, class: 'ErrorTracking::Error' do
+ project
+ name { 'ActionView::MissingTemplate' }
+ description { 'Missing template posts/edit' }
+ actor { 'PostsController#edit' }
+ platform { 'ruby' }
+ first_seen_at { Time.now.iso8601 }
+ last_seen_at { Time.now.iso8601 }
+ status { 'unresolved' }
+
+ trait :resolved do
+ status { 'resolved' }
+ end
+ end
end
diff --git a/spec/factories/error_tracking/error_event.rb b/spec/factories/error_tracking/error_event.rb
index 880fdf17fae..9620e3999d6 100644
--- a/spec/factories/error_tracking/error_event.rb
+++ b/spec/factories/error_tracking/error_event.rb
@@ -1,7 +1,8 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :error_tracking_error_event, class: 'Gitlab::ErrorTracking::ErrorEvent' do
+ # There is an issue to rename this class https://gitlab.com/gitlab-org/gitlab/-/issues/323342.
+ factory :error_tracking_sentry_error_event, class: 'Gitlab::ErrorTracking::ErrorEvent' do
issue_id { 'id' }
date_received { Time.now.iso8601 }
stack_trace_entries do
@@ -53,4 +54,14 @@ FactoryBot.define do
skip_create
end
+
+ factory :error_tracking_error_event, class: 'ErrorTracking::ErrorEvent' do
+ error factory: :error_tracking_error
+
+ description { 'ActionView::MissingTemplate' }
+ environment { 'development' }
+ level { 'error' }
+ occurred_at { Time.now.iso8601 }
+ payload { Gitlab::Json.parse(File.read(Rails.root.join('spec/fixtures/', 'error_tracking/parsed_event.json'))) }
+ end
end
diff --git a/spec/factories/gitlab/database/async_indexes/postgres_async_index.rb b/spec/factories/gitlab/database/async_indexes/postgres_async_index.rb
new file mode 100644
index 00000000000..d6b4b90bbd0
--- /dev/null
+++ b/spec/factories/gitlab/database/async_indexes/postgres_async_index.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :postgres_async_index, class: 'Gitlab::Database::AsyncIndexes::PostgresAsyncIndex' do
+ sequence(:name) { |n| "users_id_#{n}" }
+ definition { "CREATE INDEX #{name} ON #{table_name} (id)" }
+ table_name { "users" }
+ end
+end
diff --git a/spec/factories/incident_management/issuable_escalation_statuses.rb b/spec/factories/incident_management/issuable_escalation_statuses.rb
new file mode 100644
index 00000000000..54d0887f386
--- /dev/null
+++ b/spec/factories/incident_management/issuable_escalation_statuses.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :incident_management_issuable_escalation_status, class: 'IncidentManagement::IssuableEscalationStatus' do
+ issue
+ triggered
+
+ trait :triggered do
+ status { ::IncidentManagement::IssuableEscalationStatus.status_value(:triggered) }
+ end
+
+ trait :acknowledged do
+ status { ::IncidentManagement::IssuableEscalationStatus.status_value(:acknowledged) }
+ end
+
+ trait :resolved do
+ status { ::IncidentManagement::IssuableEscalationStatus.status_value(:resolved) }
+ resolved_at { Time.current }
+ end
+
+ trait :ignored do
+ status { ::IncidentManagement::IssuableEscalationStatus.status_value(:ignored) }
+ end
+ end
+end
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index ed8a562b331..a5a17ca4058 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -53,7 +53,7 @@ FactoryBot.define do
transient do
create_data { true }
url { 'https://jira.example.com' }
- api_url { nil }
+ api_url { '' }
username { 'jira_username' }
password { 'jira_password' }
jira_issue_transition_automatic { false }
diff --git a/spec/factories/packages/debian/distribution.rb b/spec/factories/packages/debian/distribution.rb
index 619308e4e18..2142dba974b 100644
--- a/spec/factories/packages/debian/distribution.rb
+++ b/spec/factories/packages/debian/distribution.rb
@@ -13,8 +13,18 @@ FactoryBot.define do
end
trait(:with_file) do
+ file_signature do
+ <<~EOF
+ -----BEGIN PGP SIGNATURE-----
+
+ ABC
+      -----END PGP SIGNATURE-----
+ EOF
+ end
+
after(:build) do |distribution, evaluator|
distribution.file = fixture_file_upload('spec/fixtures/packages/debian/distribution/Release')
+ distribution.signed_file = fixture_file_upload('spec/fixtures/packages/debian/distribution/InRelease')
end
end
diff --git a/spec/factories/packages/debian/distribution_key.rb b/spec/factories/packages/debian/distribution_key.rb
index 6bd457c50d0..94e041eb4a9 100644
--- a/spec/factories/packages/debian/distribution_key.rb
+++ b/spec/factories/packages/debian/distribution_key.rb
@@ -4,9 +4,9 @@ FactoryBot.define do
factory :debian_project_distribution_key, class: 'Packages::Debian::ProjectDistributionKey' do
distribution { association(:debian_project_distribution) }
- private_key { '-----BEGIN PGP PRIVATE KEY BLOCK-----' }
+ private_key { File.read(Rails.root.join('spec/fixtures/', 'private_key.asc')) }
passphrase { '12345' }
- public_key { '-----BEGIN PGP PUBLIC KEY BLOCK-----' }
+ public_key { File.read(Rails.root.join('spec/fixtures/', 'public_key.asc')) }
fingerprint { '12345' }
factory :debian_group_distribution_key, class: 'Packages::Debian::GroupDistributionKey' do
diff --git a/spec/factories/packages/debian/file_metadatum.rb b/spec/factories/packages/debian/file_metadatum.rb
index f761dd18b4e..505b9975f79 100644
--- a/spec/factories/packages/debian/file_metadatum.rb
+++ b/spec/factories/packages/debian/file_metadatum.rb
@@ -26,7 +26,27 @@ FactoryBot.define do
file_type { 'dsc' }
component { 'main' }
architecture { nil }
- fields { { 'a': 'b' } }
+ fields do
+ {
+ 'Format' => '3.0 (native)',
+ 'Source' => package_file.package.name,
+ 'Binary' => 'sample-dev, libsample0, sample-udeb',
+ 'Architecture' => 'any',
+        'Version' => package_file.package.version,
+ 'Maintainer' => "#{FFaker::Name.name} <#{FFaker::Internet.email}>",
+ 'Homepage' => FFaker::Internet.http_url,
+ 'Standards-Version' => '4.5.0',
+ 'Build-Depends' => 'debhelper-compat (= 13)',
+ 'Package-List' => <<~EOF.rstrip,
+          libsample0 deb libs optional arch=any
+          sample-dev deb libdevel optional arch=any
+          sample-udeb udeb libs optional arch=any
+ EOF
+ 'Checksums-Sha1' => "\nc5cfc111ea924842a89a06d5673f07dfd07de8ca 864 sample_1.2.3~alpha2.tar.xz",
+ 'Checksums-Sha256' => "\n40e4682bb24a73251ccd7c7798c0094a649091e5625d6a14bcec9b4e7174f3da 864 sample_1.2.3~alpha2.tar.xz",
+ 'Files' => "\nd5ca476e4229d135a88f9c729c7606c9 864 sample_1.2.3~alpha2.tar.xz"
+ }
+ end
end
trait(:deb) do
diff --git a/spec/factories/project_error_tracking_settings.rb b/spec/factories/project_error_tracking_settings.rb
index e09d58d293f..424f462e1a0 100644
--- a/spec/factories/project_error_tracking_settings.rb
+++ b/spec/factories/project_error_tracking_settings.rb
@@ -12,5 +12,9 @@ FactoryBot.define do
trait :disabled do
enabled { false }
end
+
+ trait :integrated do
+ integrated { true }
+ end
end
end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 84686c58a8e..fb86f4672bc 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -34,6 +34,7 @@ FactoryBot.define do
end
metrics_dashboard_access_level { ProjectFeature::PRIVATE }
operations_access_level { ProjectFeature::ENABLED }
+ container_registry_access_level { ProjectFeature::ENABLED }
# we can't assign the delegated `#ci_cd_settings` attributes directly, as the
# `#ci_cd_settings` relation needs to be created first
@@ -50,7 +51,7 @@ FactoryBot.define do
ci_job_token_scope_enabled { nil }
end
- before(:create) do |project, evaluator|
+ after(:build) do |project, evaluator|
# Builds and MRs can't have higher visibility level than repository access level.
builds_access_level = [evaluator.builds_access_level, evaluator.repository_access_level].min
merge_requests_access_level = [evaluator.merge_requests_access_level, evaluator.repository_access_level].min
@@ -66,7 +67,8 @@ FactoryBot.define do
pages_access_level: evaluator.pages_access_level,
metrics_dashboard_access_level: evaluator.metrics_dashboard_access_level,
operations_access_level: evaluator.operations_access_level,
- analytics_access_level: evaluator.analytics_access_level
+ analytics_access_level: evaluator.analytics_access_level,
+ container_registry_access_level: evaluator.container_registry_access_level
}
project.build_project_feature(hash)
@@ -263,7 +265,6 @@ FactoryBot.define do
trait :remote_mirror do
transient do
- remote_name { "remote_mirror_#{SecureRandom.hex}" }
url { "http://foo.com" }
enabled { true }
end
@@ -344,6 +345,9 @@ FactoryBot.define do
trait(:analytics_enabled) { analytics_access_level { ProjectFeature::ENABLED } }
trait(:analytics_disabled) { analytics_access_level { ProjectFeature::DISABLED } }
trait(:analytics_private) { analytics_access_level { ProjectFeature::PRIVATE } }
+ trait(:container_registry_enabled) { container_registry_access_level { ProjectFeature::ENABLED } }
+ trait(:container_registry_disabled) { container_registry_access_level { ProjectFeature::DISABLED } }
+ trait(:container_registry_private) { container_registry_access_level { ProjectFeature::PRIVATE } }
trait :auto_devops do
association :auto_devops, factory: :project_auto_devops
diff --git a/spec/factories/projects/ci_feature_usages.rb b/spec/factories/projects/ci_feature_usages.rb
new file mode 100644
index 00000000000..1ab1d82ef4b
--- /dev/null
+++ b/spec/factories/projects/ci_feature_usages.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :project_ci_feature_usage, class: 'Projects::CiFeatureUsage' do
+ project factory: :project
+ feature { :code_coverage } # rubocop: disable RSpec/EmptyExampleGroup
+ default_branch { false }
+ end
+end
diff --git a/spec/factories/sequences.rb b/spec/factories/sequences.rb
index b276e6f8cfc..0edc2b6027d 100644
--- a/spec/factories/sequences.rb
+++ b/spec/factories/sequences.rb
@@ -20,4 +20,5 @@ FactoryBot.define do
sequence(:jira_title) { |n| "[PROJ-#{n}]: fix bug" }
sequence(:jira_branch) { |n| "feature/PROJ-#{n}" }
sequence(:job_name) { |n| "job #{n}" }
+ sequence(:work_item_type_name) { |n| "bug#{n}" }
end
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index 4593294fd14..c02bcfc2169 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -25,7 +25,6 @@ FactoryBot.define do
create(:service, project: projects[2], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'MattermostService', active: false)
create(:service, group: group, project: nil, type: 'MattermostService', active: true)
- create(:service, :template, type: 'MattermostService', active: true)
mattermost_instance = create(:service, :instance, type: 'MattermostService', active: true)
create(:service, project: projects[1], type: 'MattermostService', active: true, inherit_from_id: mattermost_instance.id)
create(:service, group: group, project: nil, type: 'SlackService', active: true, inherit_from_id: mattermost_instance.id)
@@ -124,6 +123,8 @@ FactoryBot.define do
create_list(:project_snippet, 2, project: projects[0], created_at: n.days.ago)
create(:personal_snippet, created_at: n.days.ago)
end
+
+ create(:operations_feature_flag, project: projects[0])
end
end
end
diff --git a/spec/factories/user_details.rb b/spec/factories/user_details.rb
index d3cf0d48577..6d58123f2e1 100644
--- a/spec/factories/user_details.rb
+++ b/spec/factories/user_details.rb
@@ -5,5 +5,6 @@ FactoryBot.define do
user
job_title { 'VP of Sales' }
pronouns { nil }
+ pronunciation { nil }
end
end
diff --git a/spec/factories/work_item/work_item_types.rb b/spec/factories/work_item/work_item_types.rb
new file mode 100644
index 00000000000..07d6d685c57
--- /dev/null
+++ b/spec/factories/work_item/work_item_types.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :work_item_type, class: 'WorkItem::Type' do
+ namespace
+
+ name { generate(:work_item_type_name) }
+ base_type { WorkItem::Type.base_types[:issue] }
+ icon_name { 'issue-type-issue' }
+
+ trait :default do
+ namespace { nil }
+ end
+
+ trait :incident do
+ base_type { WorkItem::Type.base_types[:incident] }
+ icon_name { 'issue-type-incident' }
+ end
+
+ trait :test_case do
+ base_type { WorkItem::Type.base_types[:test_case] }
+ icon_name { 'issue-type-test-case' }
+ end
+
+ trait :requirement do
+ base_type { WorkItem::Type.base_types[:requirement] }
+ icon_name { 'issue-type-requirements' }
+ end
+ end
+end
diff --git a/spec/factories_spec.rb b/spec/factories_spec.rb
index 80e94fa1628..2b308c9080e 100644
--- a/spec/factories_spec.rb
+++ b/spec/factories_spec.rb
@@ -30,7 +30,6 @@ RSpec.describe 'factories' do
[:pages_domain, :with_trusted_expired_chain],
[:pages_domain, :explicit_ecdsa],
[:project_member, :blocked],
- [:project, :remote_mirror],
[:remote_mirror, :ssh],
[:user_preference, :only_comments],
[:ci_pipeline_artifact, :remote_store]
diff --git a/spec/fast_spec_helper.rb b/spec/fast_spec_helper.rb
index cd20019115d..b06ebba3f6c 100644
--- a/spec/fast_spec_helper.rb
+++ b/spec/fast_spec_helper.rb
@@ -18,9 +18,11 @@ require_relative '../config/settings'
require_relative 'support/rspec'
require 'active_support/all'
-ActiveSupport::Dependencies.autoload_paths << 'lib'
-ActiveSupport::Dependencies.autoload_paths << 'ee/lib'
-ActiveSupport::Dependencies.autoload_paths << 'jh/lib'
+unless ActiveSupport::Dependencies.autoload_paths.frozen?
+ ActiveSupport::Dependencies.autoload_paths << 'lib'
+ ActiveSupport::Dependencies.autoload_paths << 'ee/lib'
+ ActiveSupport::Dependencies.autoload_paths << 'jh/lib'
+end
ActiveSupport::XmlMini.backend = 'Nokogiri'
diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb
index b96762ec6ad..cd148642b90 100644
--- a/spec/features/admin/admin_appearance_spec.rb
+++ b/spec/features/admin/admin_appearance_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe 'Admin Appearance' do
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
visit new_project_path
- find('[data-qa-panel-name="blank_project"]').click
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
expect_custom_new_project_appearance(appearance)
end
diff --git a/spec/features/admin/admin_dev_ops_report_spec.rb b/spec/features/admin/admin_dev_ops_report_spec.rb
index 8f1960b681c..cee79f8f440 100644
--- a/spec/features/admin/admin_dev_ops_report_spec.rb
+++ b/spec/features/admin/admin_dev_ops_report_spec.rb
@@ -19,7 +19,9 @@ RSpec.describe 'DevOps Report page', :js do
expect(page).to have_content 'Introducing Your DevOps Report'
- find('.js-close-callout').click
+ page.within(find('[data-testid="devops-score-container"]')) do
+ find('[data-testid="close-icon"]').click
+ end
expect(page).not_to have_content 'Introducing Your DevOps Report'
end
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index 7d7b2baf941..8315b8f44b0 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -256,7 +256,7 @@ RSpec.describe 'Admin Groups' do
visit group_group_members_path(group)
- page.within '[data-qa-selector="members_list"]' do
+ page.within '[data-qa-selector="members_list"]' do # rubocop:disable QA/SelectorUsage
expect(page).to have_content(current_user.name)
expect(page).to have_content('Developer')
end
@@ -265,7 +265,7 @@ RSpec.describe 'Admin Groups' do
visit group_group_members_path(group)
- page.within '[data-qa-selector="members_list"]' do
+ page.within '[data-qa-selector="members_list"]' do # rubocop:disable QA/SelectorUsage
expect(page).not_to have_content(current_user.name)
expect(page).not_to have_content('Developer')
end
diff --git a/spec/features/admin/admin_manage_applications_spec.rb b/spec/features/admin/admin_manage_applications_spec.rb
index e54837ede11..b6437fce540 100644
--- a/spec/features/admin/admin_manage_applications_spec.rb
+++ b/spec/features/admin/admin_manage_applications_spec.rb
@@ -3,62 +3,14 @@
require 'spec_helper'
RSpec.describe 'admin manage applications' do
+ let_it_be(:new_application_path) { new_admin_application_path }
+ let_it_be(:applications_path) { admin_applications_path }
+
before do
admin = create(:admin)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
end
- it 'creates new oauth application' do
- visit admin_applications_path
-
- click_on 'New application'
- expect(page).to have_content('New application')
-
- fill_in :doorkeeper_application_name, with: 'test'
- fill_in :doorkeeper_application_redirect_uri, with: 'https://test.com'
- check :doorkeeper_application_trusted
- check :doorkeeper_application_scopes_read_user
- click_on 'Submit'
- expect(page).to have_content('Application: test')
- expect(page).to have_content('Application ID')
- expect(page).to have_content('Secret')
- expect(page).to have_content('Trusted Y')
- expect(page).to have_content('Confidential Y')
-
- click_on 'Edit'
- expect(page).to have_content('Edit application')
-
- fill_in :doorkeeper_application_name, with: 'test_changed'
- uncheck :doorkeeper_application_trusted
- uncheck :doorkeeper_application_confidential
-
- click_on 'Submit'
- expect(page).to have_content('test_changed')
- expect(page).to have_content('Application ID')
- expect(page).to have_content('Secret')
- expect(page).to have_content('Trusted N')
- expect(page).to have_content('Confidential N')
-
- visit admin_applications_path
- page.within '.oauth-applications' do
- click_on 'Destroy'
- end
- expect(page.find('.oauth-applications')).not_to have_content('test_changed')
- end
-
- context 'when scopes are blank' do
- it 'returns an error' do
- visit admin_applications_path
-
- click_on 'New application'
- expect(page).to have_content('New application')
-
- fill_in :doorkeeper_application_name, with: 'test'
- fill_in :doorkeeper_application_redirect_uri, with: 'https://test.com'
- click_on 'Submit'
-
- expect(page).to have_content("Scopes can't be blank")
- end
- end
+ include_examples 'manage applications'
end
diff --git a/spec/features/admin/admin_mode/logout_spec.rb b/spec/features/admin/admin_mode/logout_spec.rb
index efb4baa8164..58bea5c4b5f 100644
--- a/spec/features/admin/admin_mode/logout_spec.rb
+++ b/spec/features/admin/admin_mode/logout_spec.rb
@@ -9,69 +9,47 @@ RSpec.describe 'Admin Mode Logout', :js do
let(:user) { create(:admin) }
- shared_examples 'combined_menu: feature flag examples' do
- before do
- # TODO: This used to use gitlab_sign_in, instead of sign_in, but that is buggy. See
- # this issue to look into why: https://gitlab.com/gitlab-org/gitlab/-/issues/331851
- sign_in(user)
- gitlab_enable_admin_mode_sign_in(user)
- visit admin_root_path
- end
+ before do
+ # TODO: This used to use gitlab_sign_in, instead of sign_in, but that is buggy. See
+ # this issue to look into why: https://gitlab.com/gitlab-org/gitlab/-/issues/331851
+ sign_in(user)
+ gitlab_enable_admin_mode_sign_in(user)
+ visit admin_root_path
+ end
- it 'disable removes admin mode and redirects to root page' do
- gitlab_disable_admin_mode
+ it 'disable removes admin mode and redirects to root page' do
+ gitlab_disable_admin_mode
- expect(current_path).to eq root_path
+ expect(current_path).to eq root_path
- open_top_nav
+ open_top_nav
- within_top_nav do
- expect(page).to have_link(href: new_admin_session_path)
- end
- end
-
- it 'disable shows flash notice' do
- gitlab_disable_admin_mode
-
- expect(page).to have_selector('.flash-notice')
+ within_top_nav do
+ expect(page).to have_link(href: new_admin_session_path)
end
+ end
- context 'on a read-only instance' do
- before do
- allow(Gitlab::Database).to receive(:read_only?).and_return(true)
- end
-
- it 'disable removes admin mode and redirects to root page' do
- gitlab_disable_admin_mode
-
- expect(current_path).to eq root_path
-
- open_top_nav
+ it 'disable shows flash notice' do
+ gitlab_disable_admin_mode
- within_top_nav do
- expect(page).to have_link(href: new_admin_session_path)
- end
- end
- end
+ expect(page).to have_selector('.flash-notice')
end
- context 'with combined_menu feature flag on' do
- let(:needs_rewrite_for_combined_menu_flag_on) { true }
-
+ context 'on a read-only instance' do
before do
- stub_feature_flags(combined_menu: true)
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
end
- it_behaves_like 'combined_menu: feature flag examples'
- end
+ it 'disable removes admin mode and redirects to root page' do
+ gitlab_disable_admin_mode
- context 'with combined_menu feature flag off' do
- let(:needs_rewrite_for_combined_menu_flag_on) { false }
+ expect(current_path).to eq root_path
- before do
- stub_feature_flags(combined_menu: false)
- end
+ open_top_nav
- it_behaves_like 'combined_menu: feature flag examples'
+ within_top_nav do
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+ end
end
end
diff --git a/spec/features/admin/admin_mode_spec.rb b/spec/features/admin/admin_mode_spec.rb
index 9fd83f4af6d..24a10d3677d 100644
--- a/spec/features/admin/admin_mode_spec.rb
+++ b/spec/features/admin/admin_mode_spec.rb
@@ -2,48 +2,62 @@
require 'spec_helper'
-RSpec.describe 'Admin mode' do
+RSpec.describe 'Admin mode', :js do
include MobileHelpers
include Spec::Support::Helpers::Features::TopNavSpecHelpers
include StubENV
let(:admin) { create(:admin) }
- shared_examples 'combined_menu: feature flag examples' do
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ end
+
+ context 'application setting :admin_mode is enabled', :request_store do
before do
- stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ sign_in(admin)
end
- context 'application setting :admin_mode is enabled', :request_store do
- before do
- sign_in(admin)
+ context 'when not in admin mode' do
+ it 'has no leave admin mode button' do
+ visit new_admin_session_path
+ open_top_nav
+
+ page.within('.navbar-sub-nav') do
+ expect(page).not_to have_link(href: destroy_admin_session_path)
+ end
end
- context 'when not in admin mode' do
- it 'has no leave admin mode button' do
- visit new_admin_session_path
- open_top_nav
+ it 'can open pages not in admin scope' do
+ visit new_admin_session_path
+ open_top_nav_projects
- page.within('.navbar-sub-nav') do
- expect(page).not_to have_link(href: destroy_admin_session_path)
- end
+ within_top_nav do
+ click_link('Your projects')
end
- it 'can open pages not in admin scope' do
- visit new_admin_session_path
- open_top_nav_projects
+ expect(page).to have_current_path(dashboard_projects_path)
+ end
- within_top_nav do
- click_link('Your projects')
- end
+ it 'is necessary to provide credentials again before opening pages in admin scope' do
+ visit general_admin_application_settings_path # admin logged out because not in admin_mode
- expect(page).to have_current_path(dashboard_projects_path)
- end
+ expect(page).to have_current_path(new_admin_session_path)
+ end
+
+ it 'can enter admin mode' do
+ visit new_admin_session_path
- it 'is necessary to provide credentials again before opening pages in admin scope' do
- visit general_admin_application_settings_path # admin logged out because not in admin_mode
+ fill_in 'user_password', with: admin.password
- expect(page).to have_current_path(new_admin_session_path)
+ click_button 'Enter Admin Mode'
+
+ expect(page).to have_current_path(admin_root_path)
+ end
+
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
end
it 'can enter admin mode' do
@@ -55,151 +69,82 @@ RSpec.describe 'Admin mode' do
expect(page).to have_current_path(admin_root_path)
end
+ end
+ end
- context 'on a read-only instance' do
- before do
- allow(Gitlab::Database).to receive(:read_only?).and_return(true)
- end
-
- it 'can enter admin mode' do
- visit new_admin_session_path
-
- fill_in 'user_password', with: admin.password
+ context 'when in admin_mode' do
+ before do
+ gitlab_enable_admin_mode_sign_in(admin)
+ end
- click_button 'Enter Admin Mode'
+ it 'contains link to leave admin mode' do
+ open_top_nav
- expect(page).to have_current_path(admin_root_path)
- end
+ within_top_nav do
+ expect(page).to have_link(href: destroy_admin_session_path)
end
end
- context 'when in admin_mode' do
- before do
- gitlab_enable_admin_mode_sign_in(admin)
- end
+ it 'can leave admin mode using main dashboard link' do
+ gitlab_disable_admin_mode
- it 'contains link to leave admin mode' do
- open_top_nav
+ open_top_nav
- within_top_nav do
- expect(page).to have_link(href: destroy_admin_session_path)
- end
+ within_top_nav do
+ expect(page).to have_link(href: new_admin_session_path)
end
+ end
- it 'can leave admin mode using main dashboard link', :js do
- gitlab_disable_admin_mode
-
- open_top_nav
+ it 'can open pages not in admin scope' do
+ open_top_nav_projects
- within_top_nav do
- expect(page).to have_link(href: new_admin_session_path)
- end
+ within_top_nav do
+ click_link('Your projects')
end
- it 'can leave admin mode using dropdown menu on smaller screens', :js do
- skip('pending responsive development under :combined_menu feature flag') if Feature.enabled?(:combined_menu, default_enabled: :yaml)
+ expect(page).to have_current_path(dashboard_projects_path)
+ end
- resize_screen_xs
+ context 'nav bar' do
+ it 'shows admin dashboard links on bigger screen' do
visit root_dashboard_path
-
- find('.header-more').click unless Feature.enabled?(:combined_menu, default_enabled: :yaml)
-
- gitlab_disable_admin_mode
-
open_top_nav
- find('.header-more').click unless Feature.enabled?(:combined_menu, default_enabled: :yaml)
- expect(page).to have_link(href: new_admin_session_path)
+ expect(page).to have_link(text: 'Admin', href: admin_root_path, visible: true)
+ expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
end
+ end
- it 'can open pages not in admin scope' do
- open_top_nav_projects
-
- within_top_nav do
- click_link('Your projects')
- end
-
- expect(page).to have_current_path(dashboard_projects_path)
- end
-
- context 'nav bar' do
- it 'shows admin dashboard links on bigger screen' do
- visit root_dashboard_path
- open_top_nav
-
- link_text = Feature.enabled?(:combined_menu, default_enabled: :yaml) ? 'Admin' : 'Admin Area'
- expect(page).to have_link(text: link_text, href: admin_root_path, visible: true)
- expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
- end
-
- it 'relocates admin dashboard links to dropdown list on smaller screen', :js do
- skip('pending responsive development under :combined_menu feature flag') if Feature.enabled?(:combined_menu, default_enabled: :yaml)
-
- resize_screen_xs
- visit root_dashboard_path
-
- expect(page).not_to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
-
- find('.header-more').click
-
- page.within '.navbar' do
- expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
- expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
- end
- end
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
end
- context 'on a read-only instance' do
- before do
- allow(Gitlab::Database).to receive(:read_only?).and_return(true)
- end
-
- it 'can leave admin mode', :js do
- gitlab_disable_admin_mode
+ it 'can leave admin mode' do
+ gitlab_disable_admin_mode
- open_top_nav
+ open_top_nav
- within_top_nav do
- expect(page).to have_link(href: new_admin_session_path)
- end
+ within_top_nav do
+ expect(page).to have_link(href: new_admin_session_path)
end
end
end
end
-
- context 'application setting :admin_mode is disabled' do
- before do
- stub_application_setting(admin_mode: false)
- sign_in(admin)
- end
-
- it 'shows no admin mode buttons in navbar' do
- visit admin_root_path
- open_top_nav
-
- expect(page).not_to have_link(href: new_admin_session_path)
- expect(page).not_to have_link(href: destroy_admin_session_path)
- end
- end
end
- context 'with combined_menu feature flag on', :js do
- let(:needs_rewrite_for_combined_menu_flag_on) { true }
-
+ context 'application setting :admin_mode is disabled' do
before do
- stub_feature_flags(combined_menu: true)
+ stub_application_setting(admin_mode: false)
+ sign_in(admin)
end
- it_behaves_like 'combined_menu: feature flag examples'
- end
-
- context 'with combined_menu feature flag off' do
- let(:needs_rewrite_for_combined_menu_flag_on) { false }
+ it 'shows no admin mode buttons in navbar' do
+ visit admin_root_path
+ open_top_nav
- before do
- stub_feature_flags(combined_menu: false)
+ expect(page).not_to have_link(href: new_admin_session_path)
+ expect(page).not_to have_link(href: destroy_admin_session_path)
end
-
- it_behaves_like 'combined_menu: feature flag examples'
end
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 9efb31ef4c1..4a0f7ccbb0a 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -190,7 +190,7 @@ RSpec.describe 'Admin updates settings' do
accept_terms(admin)
page.within('.as-terms') do
- check 'Require all users to accept Terms of Service and Privacy Policy when they access GitLab.'
+ check 'All users must accept the Terms of Service and Privacy Policy to access GitLab'
fill_in 'Terms of Service Agreement', with: 'Be nice!'
click_button 'Save changes'
end
@@ -490,7 +490,7 @@ RSpec.describe 'Admin updates settings' do
it 'change Prometheus settings' do
page.within('.as-prometheus') do
- check 'Enable Prometheus Metrics'
+ check 'Enable health and performance metrics endpoint'
click_button 'Save changes'
end
@@ -502,23 +502,23 @@ RSpec.describe 'Admin updates settings' do
group = create(:group)
page.within('.as-performance-bar') do
- check 'Enable access to the Performance Bar'
- fill_in 'Allowed group', with: group.path
+ check 'Allow non-administrators to access to the performance bar'
+ fill_in 'Allow access to members of the following group', with: group.path
click_on 'Save changes'
end
expect(page).to have_content "Application settings saved successfully"
- expect(find_field('Enable access to the Performance Bar')).to be_checked
- expect(find_field('Allowed group').value).to eq group.path
+ expect(find_field('Allow non-administrators to access to the performance bar')).to be_checked
+ expect(find_field('Allow access to members of the following group').value).to eq group.path
page.within('.as-performance-bar') do
- uncheck 'Enable access to the Performance Bar'
+ uncheck 'Allow non-administrators to access to the performance bar'
click_on 'Save changes'
end
expect(page).to have_content 'Application settings saved successfully'
- expect(find_field('Enable access to the Performance Bar')).not_to be_checked
- expect(find_field('Allowed group').value).to be_nil
+ expect(find_field('Allow non-administrators to access to the performance bar')).not_to be_checked
+ expect(find_field('Allow access to members of the following group').value).to be_nil
end
it 'loads usage ping payload on click', :js do
@@ -585,7 +585,7 @@ RSpec.describe 'Admin updates settings' do
page.within('.as-help-page') do
fill_in 'Additional text to show on the Help page', with: 'Example text'
- check 'Hide marketing-related entries from the Help page.'
+ check 'Hide marketing-related entries from the Help page'
fill_in 'Support page URL', with: new_support_url
fill_in 'Documentation pages URL', with: new_documentation_url
click_button 'Save changes'
@@ -634,7 +634,7 @@ RSpec.describe 'Admin updates settings' do
it "change Pages Let's Encrypt settings" do
visit preferences_admin_application_settings_path
page.within('.as-pages') do
- fill_in 'Email', with: 'my@test.example.com'
+ fill_in "Let's Encrypt email", with: 'my@test.example.com'
check "I have read and agree to the Let's Encrypt Terms of Service"
click_button 'Save changes'
end
diff --git a/spec/features/admin/dashboard_spec.rb b/spec/features/admin/dashboard_spec.rb
index 618fae3e46b..112dc9e01d8 100644
--- a/spec/features/admin/dashboard_spec.rb
+++ b/spec/features/admin/dashboard_spec.rb
@@ -19,8 +19,8 @@ RSpec.describe 'admin visits dashboard' do
# Make sure the fork_networks & fork_networks reltuples have been updated
# to get a correct count on postgresql
- ActiveRecord::Base.connection.execute('ANALYZE fork_networks')
- ActiveRecord::Base.connection.execute('ANALYZE fork_network_members')
+ ForkNetwork.connection.execute('ANALYZE fork_networks')
+ ForkNetwork.connection.execute('ANALYZE fork_network_members')
visit admin_root_path
diff --git a/spec/features/admin/integrations/user_activates_mattermost_slash_command_spec.rb b/spec/features/admin/integrations/user_activates_mattermost_slash_command_spec.rb
index 6f091d37995..22a27b33671 100644
--- a/spec/features/admin/integrations/user_activates_mattermost_slash_command_spec.rb
+++ b/spec/features/admin/integrations/user_activates_mattermost_slash_command_spec.rb
@@ -11,6 +11,12 @@ RSpec.describe 'User activates the instance-level Mattermost Slash Command integ
end
let(:edit_path) { edit_admin_application_settings_integration_path(:mattermost_slash_commands) }
+ let(:overrides_path) { overrides_admin_application_settings_integration_path(:mattermost_slash_commands) }
include_examples 'user activates the Mattermost Slash Command integration'
+
+ it 'displays navigation tabs' do
+ expect(page).to have_link('Settings', href: edit_path)
+ expect(page).to have_link('Projects using custom settings', href: overrides_path)
+ end
end
diff --git a/spec/features/admin/services/admin_visits_service_templates_spec.rb b/spec/features/admin/services/admin_visits_service_templates_spec.rb
deleted file mode 100644
index d367867ebb5..00000000000
--- a/spec/features/admin/services/admin_visits_service_templates_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Admin visits service templates' do
- let(:admin) { create(:user, :admin) }
- let(:slack_integration) { Integration.for_template.find { |s| s.type == 'SlackService' } }
-
- before do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- end
-
- context 'without an active service template' do
- before do
- visit(admin_application_settings_services_path)
- end
-
- it 'does not show service template content' do
- expect(page).not_to have_content('Service template allows you to set default values for integrations')
- end
- end
-
- context 'with an active service template' do
- before do
- create(:integrations_slack, :template, active: true)
- visit(admin_application_settings_services_path)
- end
-
- it 'shows service template content' do
- expect(page).to have_content('Service template allows you to set default values for integrations')
- end
-
- context 'without instance-level integration' do
- it 'shows a link to service template' do
- expect(page).to have_link('Slack', href: edit_admin_application_settings_service_path(slack_integration.id))
- expect(page).not_to have_link('Slack', href: edit_admin_application_settings_integration_path(slack_integration))
- end
- end
-
- context 'with instance-level integration' do
- before do
- create(:integrations_slack, instance: true, project: nil)
- visit(admin_application_settings_services_path)
- end
-
- it 'shows a link to instance-level integration' do
- expect(page).not_to have_link('Slack', href: edit_admin_application_settings_service_path(slack_integration.id))
- expect(page).to have_link('Slack', href: edit_admin_application_settings_integration_path(slack_integration))
- end
- end
- end
-end
diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb
index e6eb76b13eb..624bfde7359 100644
--- a/spec/features/admin/users/user_spec.rb
+++ b/spec/features/admin/users/user_spec.rb
@@ -90,6 +90,39 @@ RSpec.describe 'Admin::Users::User' do
end
end
+ context 'when user is the sole owner of a group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user_sole_owner_of_group) { create(:user) }
+
+ before do
+ group.add_owner(user_sole_owner_of_group)
+ end
+
+ it 'shows `Delete user and contributions` action but not `Delete user` action', :js do
+ visit admin_user_path(user_sole_owner_of_group)
+
+ click_user_dropdown_toggle(user_sole_owner_of_group.id)
+
+ expect(page).to have_button('Delete user and contributions')
+ expect(page).not_to have_button('Delete user', exact: true)
+ end
+
+ it 'allows user to be deleted by using the `Delete user and contributions` action', :js do
+ visit admin_user_path(user_sole_owner_of_group)
+
+ click_action_in_user_dropdown(user_sole_owner_of_group.id, 'Delete user and contributions')
+
+ page.within('[role="dialog"]') do
+ fill_in('username', with: user_sole_owner_of_group.name)
+ click_button('Delete user and contributions')
+ end
+
+ wait_for_requests
+
+ expect(page).to have_content('The user is being deleted.')
+ end
+ end
+
describe 'Impersonation' do
let_it_be(:another_user) { create(:user) }
@@ -151,7 +184,7 @@ RSpec.describe 'Admin::Users::User' do
it 'logs in as the user when impersonate is clicked' do
subject
- find('[data-qa-selector="user_menu"]').click
+ find('[data-qa-selector="user_menu"]').click # rubocop:disable QA/SelectorUsage
expect(page.find(:css, '[data-testid="user-profile-link"]')['data-user']).to eql(another_user.username)
end
@@ -187,7 +220,7 @@ RSpec.describe 'Admin::Users::User' do
it 'logs out of impersonated user back to original user' do
subject
- find('[data-qa-selector="user_menu"]').click
+ find('[data-qa-selector="user_menu"]').click # rubocop:disable QA/SelectorUsage
expect(page.find(:css, '[data-testid="user-profile-link"]')['data-user']).to eq(current_user.username)
end
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index 4b52bb953ed..9a5b5bbfc34 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Project issue boards', :js do
include DragTo
include MobileHelpers
+ include BoardHelpers
let_it_be(:group) { create(:group, :nested) }
let_it_be(:project) { create(:project, :public, namespace: group) }
@@ -12,584 +13,508 @@ RSpec.describe 'Project issue boards', :js do
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
- before do
- project.add_maintainer(user)
- project.add_maintainer(user2)
-
- sign_in(user)
-
- set_cookie('sidebar_collapsed', 'true')
- end
-
- context 'no lists' do
+ context 'signed in user' do
before do
- visit_project_board_path_without_query_limit(project, board)
- end
-
- it 'creates default lists' do
- lists = %w[Open Closed]
-
- wait_for_requests
+ project.add_maintainer(user)
+ project.add_maintainer(user2)
- expect(page).to have_selector('.board', count: 2)
+ sign_in(user)
- page.all('.board').each_with_index do |list, i|
- expect(list.find('.board-title')).to have_content(lists[i])
- end
+ set_cookie('sidebar_collapsed', 'true')
end
- end
- context 'with lists' do
- let_it_be(:milestone) { create(:milestone, project: project) }
-
- let_it_be(:planning) { create(:label, project: project, name: 'Planning', description: 'Test') }
- let_it_be(:development) { create(:label, project: project, name: 'Development') }
- let_it_be(:testing) { create(:label, project: project, name: 'Testing') }
- let_it_be(:bug) { create(:label, project: project, name: 'Bug') }
- let_it_be(:backlog) { create(:label, project: project, name: 'Backlog') }
- let_it_be(:closed) { create(:label, project: project, name: 'Closed') }
- let_it_be(:accepting) { create(:label, project: project, name: 'Accepting Merge Requests') }
- let_it_be(:a_plus) { create(:label, project: project, name: 'A+') }
- let_it_be(:list1) { create(:list, board: board, label: planning, position: 0) }
- let_it_be(:list2) { create(:list, board: board, label: development, position: 1) }
- let_it_be(:backlog_list) { create(:backlog_list, board: board) }
-
- let_it_be(:confidential_issue) { create(:labeled_issue, :confidential, project: project, author: user, labels: [planning], relative_position: 9) }
- let_it_be(:issue1) { create(:labeled_issue, project: project, title: 'aaa', description: '111', assignees: [user], labels: [planning], relative_position: 8) }
- let_it_be(:issue2) { create(:labeled_issue, project: project, title: 'bbb', description: '222', author: user2, labels: [planning], relative_position: 7) }
- let_it_be(:issue3) { create(:labeled_issue, project: project, title: 'ccc', description: '333', labels: [planning], relative_position: 6) }
- let_it_be(:issue4) { create(:labeled_issue, project: project, title: 'ddd', description: '444', labels: [planning], relative_position: 5) }
- let_it_be(:issue5) { create(:labeled_issue, project: project, title: 'eee', description: '555', labels: [planning], milestone: milestone, relative_position: 4) }
- let_it_be(:issue6) { create(:labeled_issue, project: project, title: 'fff', description: '666', labels: [planning, development], relative_position: 3) }
- let_it_be(:issue7) { create(:labeled_issue, project: project, title: 'ggg', description: '777', labels: [development], relative_position: 2) }
- let_it_be(:issue8) { create(:closed_issue, project: project, title: 'hhh', description: '888') }
- let_it_be(:issue9) { create(:labeled_issue, project: project, title: 'iii', description: '999', labels: [planning, testing, bug, accepting], relative_position: 1) }
- let_it_be(:issue10) { create(:labeled_issue, project: project, title: 'issue +', description: 'A+ great issue', labels: [a_plus]) }
-
- before do
- stub_feature_flags(board_new_list: false)
+ context 'no lists' do
+ before do
+ visit_project_board_path_without_query_limit(project, board)
+ end
- visit_project_board_path_without_query_limit(project, board)
+ it 'creates default lists' do
+ lists = %w[Open Closed]
- wait_for_requests
+ wait_for_requests
- expect(page).to have_selector('.board', count: 4)
- expect(find('.board:nth-child(2)')).to have_selector('.board-card')
- expect(find('.board:nth-child(3)')).to have_selector('.board-card')
- expect(find('.board:nth-child(4)')).to have_selector('.board-card')
- end
+ expect(page).to have_selector('.board', count: 2)
- it 'shows description tooltip on list title', :quarantine do
- page.within('.board:nth-child(2)') do
- expect(find('.board-title span.has-tooltip')[:title]).to eq('Test')
+ page.all('.board').each_with_index do |list, i|
+ expect(list.find('.board-title')).to have_content(lists[i])
+ end
end
end
- it 'shows issues in lists' do
- wait_for_board_cards(2, 8)
- wait_for_board_cards(3, 2)
- end
+ context 'with lists' do
+ let_it_be(:milestone) { create(:milestone, project: project) }
+
+ let_it_be(:planning) { create(:label, project: project, name: 'Planning', description: 'Test') }
+ let_it_be(:development) { create(:label, project: project, name: 'Development') }
+ let_it_be(:testing) { create(:label, project: project, name: 'Testing') }
+ let_it_be(:bug) { create(:label, project: project, name: 'Bug') }
+ let_it_be(:backlog) { create(:label, project: project, name: 'Backlog') }
+ let_it_be(:closed) { create(:label, project: project, name: 'Closed') }
+ let_it_be(:accepting) { create(:label, project: project, name: 'Accepting Merge Requests') }
+ let_it_be(:a_plus) { create(:label, project: project, name: 'A+') }
+ let_it_be(:list1) { create(:list, board: board, label: planning, position: 0) }
+ let_it_be(:list2) { create(:list, board: board, label: development, position: 1) }
+ let_it_be(:backlog_list) { create(:backlog_list, board: board) }
+
+ let_it_be(:confidential_issue) { create(:labeled_issue, :confidential, project: project, author: user, labels: [planning], relative_position: 9) }
+ let_it_be(:issue1) { create(:labeled_issue, project: project, title: 'aaa', description: '111', assignees: [user], labels: [planning], relative_position: 8) }
+ let_it_be(:issue2) { create(:labeled_issue, project: project, title: 'bbb', description: '222', author: user2, labels: [planning], relative_position: 7) }
+ let_it_be(:issue3) { create(:labeled_issue, project: project, title: 'ccc', description: '333', labels: [planning], relative_position: 6) }
+ let_it_be(:issue4) { create(:labeled_issue, project: project, title: 'ddd', description: '444', labels: [planning], relative_position: 5) }
+ let_it_be(:issue5) { create(:labeled_issue, project: project, title: 'eee', description: '555', labels: [planning], milestone: milestone, relative_position: 4) }
+ let_it_be(:issue6) { create(:labeled_issue, project: project, title: 'fff', description: '666', labels: [planning, development], relative_position: 3) }
+ let_it_be(:issue7) { create(:labeled_issue, project: project, title: 'ggg', description: '777', labels: [development], relative_position: 2) }
+ let_it_be(:issue8) { create(:closed_issue, project: project, title: 'hhh', description: '888') }
+ let_it_be(:issue9) { create(:labeled_issue, project: project, title: 'iii', description: '999', labels: [planning, testing, bug, accepting], relative_position: 1) }
+ let_it_be(:issue10) { create(:labeled_issue, project: project, title: 'issue +', description: 'A+ great issue', labels: [a_plus]) }
- it 'shows confidential issues with icon' do
- page.within(find('.board:nth-child(2)')) do
- expect(page).to have_selector('.confidential-icon', count: 1)
+ before do
+ visit_project_board_path_without_query_limit(project, board)
end
- end
-
- it 'search closed list' do
- find('.filtered-search').set(issue8.title)
- find('.filtered-search').native.send_keys(:enter)
- wait_for_requests
+ it 'shows description tooltip on list title', :quarantine do
+ page.within('.board:nth-child(2)') do
+ expect(find('.board-title span.has-tooltip')[:title]).to eq('Test')
+ end
+ end
- expect(find('.board:nth-child(2)')).to have_selector('.board-card', count: 0)
- expect(find('.board:nth-child(3)')).to have_selector('.board-card', count: 0)
- expect(find('.board:nth-child(4)')).to have_selector('.board-card', count: 1)
- end
+ it 'shows issues in lists' do
+ wait_for_board_cards(2, 8)
+ wait_for_board_cards(3, 2)
+ end
- it 'search list' do
- find('.filtered-search').set(issue5.title)
- find('.filtered-search').native.send_keys(:enter)
+ it 'shows confidential issues with icon' do
+ page.within(find('.board:nth-child(2)')) do
+ expect(page).to have_selector('.confidential-icon', count: 1)
+ end
+ end
- wait_for_requests
+ it 'search closed list' do
+ find('.filtered-search').set(issue8.title)
+ find('.filtered-search').native.send_keys(:enter)
- expect(find('.board:nth-child(2)')).to have_selector('.board-card', count: 1)
- expect(find('.board:nth-child(3)')).to have_selector('.board-card', count: 0)
- expect(find('.board:nth-child(4)')).to have_selector('.board-card', count: 0)
- end
+ wait_for_requests
- context 'search list negation queries' do
- before do
- visit_project_board_path_without_query_limit(project, board)
+ expect(find('.board:nth-child(2)')).to have_selector('.board-card', count: 0)
+ expect(find('.board:nth-child(3)')).to have_selector('.board-card', count: 0)
+ expect(find('.board:nth-child(4)')).to have_selector('.board-card', count: 1)
end
- it 'does not have the != option' do
- find('.filtered-search').set('label:')
+ it 'search list' do
+ find('.filtered-search').set(issue5.title)
+ find('.filtered-search').native.send_keys(:enter)
wait_for_requests
- within('#js-dropdown-operator') do
- tokens = all(:css, 'li.filter-dropdown-item')
- expect(tokens.count).to eq(2)
- button = tokens[0].find('button')
- expect(button).to have_content('=')
- button = tokens[1].find('button')
- expect(button).to have_content('!=')
- end
- end
- end
- it 'allows user to delete board' do
- remove_list
+ expect(find('.board:nth-child(2)')).to have_selector('.board-card', count: 1)
+ expect(find('.board:nth-child(3)')).to have_selector('.board-card', count: 0)
+ expect(find('.board:nth-child(4)')).to have_selector('.board-card', count: 0)
+ end
- wait_for_requests
+ context 'search list negation queries' do
+ before do
+ visit_project_board_path_without_query_limit(project, board)
+ end
- expect(page).to have_selector('.board', count: 3)
- end
+ it 'does not have the != option' do
+ find('.filtered-search').set('label:')
- it 'infinite scrolls list' do
- create_list(:labeled_issue, 30, project: project, labels: [planning])
+ wait_for_requests
+ within('#js-dropdown-operator') do
+ tokens = all(:css, 'li.filter-dropdown-item')
+ expect(tokens.count).to eq(2)
+ button = tokens[0].find('button')
+ expect(button).to have_content('=')
+ button = tokens[1].find('button')
+ expect(button).to have_content('!=')
+ end
+ end
+ end
- visit_project_board_path_without_query_limit(project, board)
+ it 'allows user to delete board' do
+ remove_list
- page.within(find('.board:nth-child(2)')) do
- expect(page.find('.board-header')).to have_content('38')
- expect(page).to have_selector('.board-card', count: 10)
- expect(page).to have_content('Showing 10 of 38 issues')
+ wait_for_requests
- find('.board .board-list')
+ expect(page).to have_selector('.board', count: 3)
+ end
- inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
- evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
- end
+ it 'infinite scrolls list' do
+ create_list(:labeled_issue, 30, project: project, labels: [planning])
- expect(page).to have_selector('.board-card', count: 20)
- expect(page).to have_content('Showing 20 of 38 issues')
+ visit_project_board_path_without_query_limit(project, board)
- find('.board .board-list')
+ page.within(find('.board:nth-child(2)')) do
+ expect(page.find('.board-header')).to have_content('38')
+ expect(page).to have_selector('.board-card', count: 10)
+ expect(page).to have_content('Showing 10 of 38 issues')
- inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
- evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
- end
+ find('.board .board-list')
- expect(page).to have_selector('.board-card', count: 30)
- expect(page).to have_content('Showing 30 of 38 issues')
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ end
- find('.board .board-list')
+ expect(page).to have_selector('.board-card', count: 20)
+ expect(page).to have_content('Showing 20 of 38 issues')
- inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
- evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
- end
+ find('.board .board-list')
- expect(page).to have_selector('.board-card', count: 38)
- expect(page).to have_content('Showing all issues')
- end
- end
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ end
- context 'closed' do
- it 'shows list of closed issues' do
- wait_for_board_cards(4, 1)
- wait_for_requests
- end
+ expect(page).to have_selector('.board-card', count: 30)
+ expect(page).to have_content('Showing 30 of 38 issues')
- it 'moves issue to closed' do
- drag(list_from_index: 1, list_to_index: 3)
+ find('.board .board-list')
- wait_for_board_cards(2, 7)
- wait_for_board_cards(3, 2)
- wait_for_board_cards(4, 2)
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ end
- expect(find('.board:nth-child(2)')).not_to have_content(issue9.title)
- expect(find('.board:nth-child(4)')).to have_selector('.board-card', count: 2)
- expect(find('.board:nth-child(4)')).to have_content(issue9.title)
- expect(find('.board:nth-child(4)')).not_to have_content(planning.title)
+ expect(page).to have_selector('.board-card', count: 38)
+ expect(page).to have_content('Showing all issues')
+ end
end
- it 'removes all of the same issue to closed' do
- drag(list_from_index: 1, list_to_index: 3)
-
- wait_for_board_cards(2, 7)
- wait_for_board_cards(3, 2)
- wait_for_board_cards(4, 2)
+ context 'closed' do
+ it 'shows list of closed issues' do
+ wait_for_board_cards(4, 1)
+ wait_for_requests
+ end
- expect(find('.board:nth-child(2)')).not_to have_content(issue9.title)
- expect(find('.board:nth-child(4)')).to have_content(issue9.title)
- expect(find('.board:nth-child(4)')).not_to have_content(planning.title)
- end
- end
+ it 'moves issue to closed' do
+ drag(list_from_index: 1, list_to_index: 3)
- context 'lists' do
- it 'changes position of list' do
- drag(list_from_index: 2, list_to_index: 1, selector: '.board-header')
+ wait_for_board_cards(2, 7)
+ wait_for_board_cards(3, 2)
+ wait_for_board_cards(4, 2)
- wait_for_board_cards(2, 2)
- wait_for_board_cards(3, 8)
- wait_for_board_cards(4, 1)
+ expect(find('.board:nth-child(2)')).not_to have_content(issue9.title)
+ expect(find('.board:nth-child(4)')).to have_selector('.board-card', count: 2)
+ expect(find('.board:nth-child(4)')).to have_content(issue9.title)
+ expect(find('.board:nth-child(4)')).not_to have_content(planning.title)
+ end
- expect(find('.board:nth-child(2)')).to have_content(development.title)
- expect(find('.board:nth-child(3)')).to have_content(planning.title)
+ it 'removes all of the same issue to closed' do
+ drag(list_from_index: 1, list_to_index: 3)
- # Make sure list positions are preserved after a reload
- visit_project_board_path_without_query_limit(project, board)
+ wait_for_board_cards(2, 7)
+ wait_for_board_cards(3, 2)
+ wait_for_board_cards(4, 2)
- expect(find('.board:nth-child(2)')).to have_content(development.title)
- expect(find('.board:nth-child(3)')).to have_content(planning.title)
+ expect(find('.board:nth-child(2)')).not_to have_content(issue9.title)
+ expect(find('.board:nth-child(4)')).to have_content(issue9.title)
+ expect(find('.board:nth-child(4)')).not_to have_content(planning.title)
+ end
end
- it 'dragging does not duplicate list' do
- selector = '.board:not(.is-ghost) .board-header'
- expect(page).to have_selector(selector, text: development.title, count: 1)
+ context 'lists' do
+ it 'changes position of list' do
+ drag(list_from_index: 2, list_to_index: 1, selector: '.board-header')
- drag(list_from_index: 2, list_to_index: 1, selector: '.board-header', perform_drop: false)
+ expect(find('.board:nth-child(2) [data-testid="board-list-header"]')).to have_content(development.title)
+ expect(find('.board:nth-child(3) [data-testid="board-list-header"]')).to have_content(planning.title)
- expect(page).to have_selector(selector, text: development.title, count: 1)
- end
+ # Make sure list positions are preserved after a reload
+ visit_project_board_path_without_query_limit(project, board)
- it 'issue moves between lists and does not show the "Development" label since the card is in the "Development" list label' do
- drag(list_from_index: 1, from_index: 1, list_to_index: 2)
+ expect(find('.board:nth-child(2) [data-testid="board-list-header"]')).to have_content(development.title)
+ expect(find('.board:nth-child(3) [data-testid="board-list-header"]')).to have_content(planning.title)
+ end
- wait_for_board_cards(2, 7)
- wait_for_board_cards(3, 2)
- wait_for_board_cards(4, 1)
+ context 'without backlog and closed lists' do
+ let_it_be(:board) { create(:board, project: project, hide_backlog_list: true, hide_closed_list: true) }
+ let_it_be(:list1) { create(:list, board: board, label: planning, position: 0) }
+ let_it_be(:list2) { create(:list, board: board, label: development, position: 1) }
- expect(find('.board:nth-child(3)')).to have_content(issue6.title)
- expect(find('.board:nth-child(3)').all('.board-card').last).not_to have_content(development.title)
- end
+ it 'changes position of list' do
+ visit_project_board_path_without_query_limit(project, board)
- it 'issue moves between lists and does not show the "Planning" label since the card is in the "Planning" list label' do
- drag(list_from_index: 2, list_to_index: 1)
+ drag(list_from_index: 0, list_to_index: 1, selector: '.board-header')
- wait_for_board_cards(2, 9)
- wait_for_board_cards(3, 1)
- wait_for_board_cards(4, 1)
+ expect(find('.board:nth-child(1) [data-testid="board-list-header"]')).to have_content(development.title)
+ expect(find('.board:nth-child(2) [data-testid="board-list-header"]')).to have_content(planning.title)
- expect(find('.board:nth-child(2)')).to have_content(issue7.title)
- expect(find('.board:nth-child(2)').all('.board-card').first).not_to have_content(planning.title)
- end
+ # Make sure list positions are preserved after a reload
+ visit_project_board_path_without_query_limit(project, board)
- it 'issue moves from closed' do
- drag(list_from_index: 2, list_to_index: 3)
+ expect(find('.board:nth-child(1) [data-testid="board-list-header"]')).to have_content(development.title)
+ expect(find('.board:nth-child(2) [data-testid="board-list-header"]')).to have_content(planning.title)
+ end
+ end
- wait_for_board_cards(2, 8)
- wait_for_board_cards(3, 1)
- wait_for_board_cards(4, 2)
+ it 'dragging does not duplicate list' do
+ selector = '.board:not(.is-ghost) .board-header'
+ expect(page).to have_selector(selector, text: development.title, count: 1)
- expect(find('.board:nth-child(4)')).to have_content(issue8.title)
- end
+ drag(list_from_index: 2, list_to_index: 1, selector: '.board-header', perform_drop: false)
- context 'issue card' do
- it 'shows assignee' do
- page.within(find('.board:nth-child(2)')) do
- expect(page).to have_selector('.avatar', count: 1)
- end
+ expect(page).to have_selector(selector, text: development.title, count: 1)
end
- context 'list header' do
- let(:total_planning_issues) { "8" }
+ it 'issue moves between lists and does not show the "Development" label since the card is in the "Development" list label' do
+ drag(list_from_index: 1, from_index: 1, list_to_index: 2)
- it 'shows issue count on the list' do
- page.within(find(".board:nth-child(2)")) do
- expect(page.find('[data-testid="board-items-count"]')).to have_text(total_planning_issues)
- expect(page).not_to have_selector('.js-max-issue-size')
- end
- end
+ wait_for_board_cards(2, 7)
+ wait_for_board_cards(3, 2)
+ wait_for_board_cards(4, 1)
+
+ expect(find('.board:nth-child(3)')).to have_content(issue6.title)
+ expect(find('.board:nth-child(3)').all('.board-card').last).not_to have_content(development.title)
end
- end
- context 'new list' do
- it 'shows all labels in new list dropdown' do
- click_button 'Add list'
+ it 'issue moves between lists and does not show the "Planning" label since the card is in the "Planning" list label' do
+ drag(list_from_index: 2, list_to_index: 1)
- wait_for_requests
+ wait_for_board_cards(2, 9)
+ wait_for_board_cards(3, 1)
+ wait_for_board_cards(4, 1)
- page.within('.dropdown-menu-issues-board-new') do
- expect(page).to have_content(planning.title)
- expect(page).to have_content(development.title)
- expect(page).to have_content(testing.title)
- end
+ expect(find('.board:nth-child(2)')).to have_content(issue7.title)
+ expect(find('.board:nth-child(2)').all('.board-card').first).not_to have_content(planning.title)
end
- it 'creates new list for label' do
- click_button 'Add list'
- wait_for_requests
-
- page.within('.dropdown-menu-issues-board-new') do
- click_link testing.title
- end
+ it 'issue moves from closed' do
+ drag(list_from_index: 2, list_to_index: 3)
- wait_for_requests
+ wait_for_board_cards(2, 8)
+ wait_for_board_cards(3, 1)
+ wait_for_board_cards(4, 2)
- expect(page).to have_selector('.board', count: 5)
+ expect(find('.board:nth-child(4)')).to have_content(issue8.title)
end
- it 'creates new list for Backlog label' do
- click_button 'Add list'
- wait_for_requests
-
- page.within('.dropdown-menu-issues-board-new') do
- click_link backlog.title
+ context 'issue card' do
+ it 'shows assignee' do
+ page.within(find('.board:nth-child(2)')) do
+ expect(page).to have_selector('.avatar', count: 1)
+ end
end
- wait_for_requests
+ context 'list header' do
+ let(:total_planning_issues) { "8" }
- expect(page).to have_selector('.board', count: 5)
+ it 'shows issue count on the list' do
+ page.within(find(".board:nth-child(2)")) do
+ expect(page.find('[data-testid="board-items-count"]')).to have_text(total_planning_issues)
+ expect(page).not_to have_selector('.js-max-issue-size')
+ end
+ end
+ end
end
+ end
+
+ context 'filtering' do
+ it 'filters by author' do
+ set_filter("author", user2.username)
+ click_filter_link(user2.username)
+ submit_filter
- it 'creates new list for Closed label' do
- click_button 'Add list'
wait_for_requests
+ wait_for_board_cards(2, 1)
+ wait_for_empty_boards((3..4))
+ end
- page.within('.dropdown-menu-issues-board-new') do
- click_link closed.title
- end
+ it 'filters by assignee' do
+ set_filter("assignee", user.username)
+ click_filter_link(user.username)
+ submit_filter
wait_for_requests
- expect(page).to have_selector('.board', count: 5)
+ wait_for_board_cards(2, 1)
+ wait_for_empty_boards((3..4))
end
- it 'keeps dropdown open after adding new list' do
- click_button 'Add list'
- wait_for_requests
-
- page.within('.dropdown-menu-issues-board-new') do
- click_link closed.title
- end
+ it 'filters by milestone' do
+ set_filter("milestone", "\"#{milestone.title}")
+ click_filter_link(milestone.title)
+ submit_filter
wait_for_requests
-
- expect(page).to have_css('#js-add-list.show')
+ wait_for_board_cards(2, 1)
+ wait_for_board_cards(3, 0)
+ wait_for_board_cards(4, 0)
end
- it 'creates new list from a new label' do
- click_button 'Add list'
+ it 'filters by label' do
+ set_filter("label", testing.title)
+ click_filter_link(testing.title)
+ submit_filter
wait_for_requests
+ wait_for_board_cards(2, 1)
+ wait_for_empty_boards((3..4))
+ end
- click_link 'Create project label'
+ it 'filters by label with encoded character' do
+ set_filter("label", a_plus.title)
+ click_filter_link(a_plus.title)
+ submit_filter
- fill_in('new_label_name', with: 'Testing New Label - with list')
+ wait_for_board_cards(1, 1)
+ wait_for_empty_boards((2..4))
+ end
- first('.suggest-colors a').click
+ it 'filters by label with space after reload', :quarantine do
+ set_filter("label", "\"#{accepting.title}")
+ click_filter_link(accepting.title)
+ submit_filter
- click_button 'Create'
+ # Test after reload
+ page.evaluate_script 'window.location.reload()'
+ wait_for_board_cards(2, 1)
+ wait_for_empty_boards((3..4))
wait_for_requests
- wait_for_requests
- expect(page).to have_selector('.board', count: 5)
+ page.within(find('.board:nth-child(2)')) do
+ expect(page.find('.board-header')).to have_content('1')
+ expect(page).to have_selector('.board-card', count: 1)
+ end
+
+ page.within(find('.board:nth-child(3)')) do
+ expect(page.find('.board-header')).to have_content('0')
+ expect(page).to have_selector('.board-card', count: 0)
+ end
end
- end
- end
- context 'filtering' do
- it 'filters by author' do
- set_filter("author", user2.username)
- click_filter_link(user2.username)
- submit_filter
+ it 'removes filtered labels' do
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ set_filter("label", testing.title)
+ click_filter_link(testing.title)
+ submit_filter
- wait_for_requests
- wait_for_board_cards(2, 1)
- wait_for_empty_boards((3..4))
- end
+ wait_for_board_cards(2, 1)
- it 'filters by assignee' do
- set_filter("assignee", user.username)
- click_filter_link(user.username)
- submit_filter
+ find('.clear-search').click
+ submit_filter
+ end
- wait_for_requests
+ wait_for_board_cards(2, 8)
+ end
- wait_for_board_cards(2, 1)
- wait_for_empty_boards((3..4))
- end
+ it 'infinite scrolls list with label filter' do
+ create_list(:labeled_issue, 30, project: project, labels: [planning, testing])
- it 'filters by milestone' do
- set_filter("milestone", "\"#{milestone.title}")
- click_filter_link(milestone.title)
- submit_filter
+ set_filter("label", testing.title)
+ click_filter_link(testing.title)
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ submit_filter
+ end
- wait_for_requests
- wait_for_board_cards(2, 1)
- wait_for_board_cards(3, 0)
- wait_for_board_cards(4, 0)
- end
+ wait_for_requests
- it 'filters by label' do
- set_filter("label", testing.title)
- click_filter_link(testing.title)
- submit_filter
+ page.within(find('.board:nth-child(2)')) do
+ expect(page.find('.board-header')).to have_content('31')
+ expect(page).to have_selector('.board-card', count: 10)
+ expect(page).to have_content('Showing 10 of 31 issues')
- wait_for_requests
- wait_for_board_cards(2, 1)
- wait_for_empty_boards((3..4))
- end
+ find('.board .board-list')
- it 'filters by label with encoded character' do
- set_filter("label", a_plus.title)
- click_filter_link(a_plus.title)
- submit_filter
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ end
- wait_for_board_cards(1, 1)
- wait_for_empty_boards((2..4))
- end
+ expect(page).to have_selector('.board-card', count: 20)
+ expect(page).to have_content('Showing 20 of 31 issues')
- it 'filters by label with space after reload', :quarantine do
- set_filter("label", "\"#{accepting.title}")
- click_filter_link(accepting.title)
- submit_filter
+ find('.board .board-list')
- # Test after reload
- page.evaluate_script 'window.location.reload()'
- wait_for_board_cards(2, 1)
- wait_for_empty_boards((3..4))
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ end
- wait_for_requests
+ expect(page).to have_selector('.board-card', count: 30)
+ expect(page).to have_content('Showing 30 of 31 issues')
- page.within(find('.board:nth-child(2)')) do
- expect(page.find('.board-header')).to have_content('1')
- expect(page).to have_selector('.board-card', count: 1)
- end
+ find('.board .board-list')
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ end
- page.within(find('.board:nth-child(3)')) do
- expect(page.find('.board-header')).to have_content('0')
- expect(page).to have_selector('.board-card', count: 0)
+ expect(page).to have_selector('.board-card', count: 31)
+ expect(page).to have_content('Showing all issues')
+ end
end
- end
- it 'removes filtered labels' do
- inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ it 'filters by multiple labels', :quarantine do
set_filter("label", testing.title)
click_filter_link(testing.title)
- submit_filter
- wait_for_board_cards(2, 1)
+ set_filter("label", bug.title)
+ click_filter_link(bug.title)
- find('.clear-search').click
submit_filter
- end
- wait_for_board_cards(2, 8)
- end
-
- it 'infinite scrolls list with label filter' do
- create_list(:labeled_issue, 30, project: project, labels: [planning, testing])
+ wait_for_requests
- set_filter("label", testing.title)
- click_filter_link(testing.title)
- inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
- submit_filter
+ wait_for_board_cards(2, 1)
+ wait_for_empty_boards((3..4))
end
- wait_for_requests
-
- page.within(find('.board:nth-child(2)')) do
- expect(page.find('.board-header')).to have_content('31')
- expect(page).to have_selector('.board-card', count: 10)
- expect(page).to have_content('Showing 10 of 31 issues')
-
- find('.board .board-list')
-
- inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
- evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
- end
-
- expect(page).to have_selector('.board-card', count: 20)
- expect(page).to have_content('Showing 20 of 31 issues')
-
- find('.board .board-list')
-
- inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
- evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ it 'filters by clicking label button on issue' do
+ page.within(find('.board:nth-child(2)')) do
+ expect(page).to have_selector('.board-card', count: 8)
+ expect(find('.board-card', match: :first)).to have_content(bug.title)
+ click_link(bug.title)
+ wait_for_requests
end
- expect(page).to have_selector('.board-card', count: 30)
- expect(page).to have_content('Showing 30 of 31 issues')
-
- find('.board .board-list')
- inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
- evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ page.within('.tokens-container') do
+ expect(page).to have_content(bug.title)
end
- expect(page).to have_selector('.board-card', count: 31)
- expect(page).to have_content('Showing all issues')
- end
- end
-
- it 'filters by multiple labels', :quarantine do
- set_filter("label", testing.title)
- click_filter_link(testing.title)
-
- set_filter("label", bug.title)
- click_filter_link(bug.title)
-
- submit_filter
-
- wait_for_requests
-
- wait_for_board_cards(2, 1)
- wait_for_empty_boards((3..4))
- end
-
- it 'filters by clicking label button on issue' do
- page.within(find('.board:nth-child(2)')) do
- expect(page).to have_selector('.board-card', count: 8)
- expect(find('.board-card', match: :first)).to have_content(bug.title)
- click_link(bug.title)
wait_for_requests
- end
- page.within('.tokens-container') do
- expect(page).to have_content(bug.title)
+ wait_for_board_cards(2, 1)
+ wait_for_empty_boards((3..4))
end
- wait_for_requests
+ it 'removes label filter by clicking label button on issue' do
+ page.within(find('.board:nth-child(2)')) do
+ page.within(find('.board-card', match: :first)) do
+ click_link(bug.title)
+ end
- wait_for_board_cards(2, 1)
- wait_for_empty_boards((3..4))
- end
+ wait_for_requests
- it 'removes label filter by clicking label button on issue' do
- page.within(find('.board:nth-child(2)')) do
- page.within(find('.board-card', match: :first)) do
- click_link(bug.title)
+ expect(page).to have_selector('.board-card', count: 1)
end
wait_for_requests
-
- expect(page).to have_selector('.board-card', count: 1)
end
-
- wait_for_requests
end
end
- end
- context 'issue board focus mode' do
- before do
- visit project_board_path(project, board)
- wait_for_requests
- end
+ context 'issue board focus mode' do
+ before do
+ visit project_board_path(project, board)
+ wait_for_requests
+ end
- it 'shows the button' do
- expect(page).to have_button('Toggle focus mode')
+ it 'shows the button' do
+ expect(page).to have_button('Toggle focus mode')
+ end
end
- end
- context 'keyboard shortcuts' do
- before do
- visit_project_board_path_without_query_limit(project, board)
- wait_for_requests
- end
+ context 'keyboard shortcuts' do
+ before do
+ visit_project_board_path_without_query_limit(project, board)
+ wait_for_requests
+ end
- it 'allows user to use keyboard shortcuts' do
- find('body').native.send_keys('i')
- expect(page).to have_content('New Issue')
+ it 'allows user to use keyboard shortcuts' do
+ find('body').native.send_keys('i')
+ expect(page).to have_content('New Issue')
+ end
end
end
context 'signed out user' do
before do
- sign_out(:user)
visit project_board_path(project, board)
wait_for_requests
end
@@ -599,7 +524,7 @@ RSpec.describe 'Project issue boards', :js do
end
it 'does not show create new list' do
- expect(page).not_to have_button('.js-new-board-list')
+ expect(page).not_to have_button('Create list')
end
it 'does not allow dragging' do
@@ -612,34 +537,16 @@ RSpec.describe 'Project issue boards', :js do
before do
project.add_guest(user_guest)
- sign_out(:user)
sign_in(user_guest)
visit project_board_path(project, board)
wait_for_requests
end
it 'does not show create new list' do
- expect(page).not_to have_selector('.js-new-board-list')
+ expect(page).not_to have_button('Create list')
end
end
- def drag(selector: '.board-list', list_from_index: 0, from_index: 0, to_index: 0, list_to_index: 0, perform_drop: true)
- # ensure there is enough horizontal space for four boards
- inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
- resize_window(2000, 800)
-
- drag_to(selector: selector,
- scrollable: '#board-app',
- list_from_index: list_from_index,
- from_index: from_index,
- to_index: to_index,
- list_to_index: list_to_index,
- perform_drop: perform_drop)
- end
-
- wait_for_requests
- end
-
def wait_for_board_cards(board_number, expected_cards)
page.within(find(".board:nth-child(#{board_number})")) do
expect(page.find('.board-header')).to have_content(expected_cards.to_s)
@@ -682,6 +589,8 @@ RSpec.describe 'Project issue boards', :js do
def visit_project_board_path_without_query_limit(project, board)
inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
visit project_board_path(project, board)
+
+ wait_for_requests
end
end
end
diff --git a/spec/features/callouts/service_templates_deprecation_spec.rb b/spec/features/callouts/service_templates_deprecation_spec.rb
deleted file mode 100644
index b6403b54e29..00000000000
--- a/spec/features/callouts/service_templates_deprecation_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Service templates deprecation callout' do
- let_it_be(:admin) { create(:admin) }
- let_it_be(:non_admin) { create(:user) }
- let_it_be(:callout_content) { 'Service templates are deprecated and will be removed in GitLab 14.0.' }
-
- context 'when a non-admin is logged in' do
- before do
- sign_in(non_admin)
- visit root_dashboard_path
- end
-
- it 'does not display callout' do
- expect(page).not_to have_content callout_content
- end
- end
-
- context 'when an admin is logged in' do
- before do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
-
- visit root_dashboard_path
- end
-
- context 'with no active service templates' do
- it 'does not display callout' do
- expect(page).not_to have_content callout_content
- end
- end
-
- context 'with active service template' do
- before do
- create(:service, :template, type: 'MattermostService', active: true)
- visit root_dashboard_path
- end
-
- it 'displays callout' do
- expect(page).to have_content callout_content
- expect(page).to have_link 'See affected service templates', href: admin_application_settings_services_path
- end
-
- context 'when callout is dismissed', :js do
- before do
- find('[data-testid="close-service-templates-deprecated-callout"]').click
-
- visit root_dashboard_path
- end
-
- it 'does not display callout' do
- expect(page).not_to have_content callout_content
- end
- end
- end
- end
-end
diff --git a/spec/features/clusters/cluster_health_dashboard_spec.rb b/spec/features/clusters/cluster_health_dashboard_spec.rb
index 20c07f4d6ac..e4a36f654e5 100644
--- a/spec/features/clusters/cluster_health_dashboard_spec.rb
+++ b/spec/features/clusters/cluster_health_dashboard_spec.rb
@@ -63,21 +63,33 @@ RSpec.describe 'Cluster Health board', :js, :kubeclient, :use_clean_rails_memory
context 'connected, prometheus returns data' do
before do
stub_connected
- end
- it 'renders charts' do
visit cluster_path
click_link 'Health'
wait_for_requests
+ end
+ it 'renders charts' do
expect(page).to have_css('.prometheus-graphs')
expect(page).to have_css('.prometheus-graph')
expect(page).to have_css('.prometheus-graph-title')
expect(page).to have_css('[_echarts_instance_]')
+ expect(page).to have_css('.prometheus-graph', count: 2)
expect(page).to have_content('Avg')
end
+
+ it 'focuses the single panel on toggle', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338341' do
+ click_button('More actions')
+ click_button('Expand panel')
+
+ expect(page).to have_css('.prometheus-graph', count: 1)
+
+ click_button('Collapse panel')
+
+ expect(page).to have_css('.prometheus-graph', count: 2)
+ end
end
def stub_empty_response
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index d0f8767884e..de6cb53fdfa 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe 'Value Stream Analytics', :js do
let_it_be(:user) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:stage_table_selector) { '[data-testid="vsa-stage-table"]' }
+ let_it_be(:metrics_selector) { "[data-testid='vsa-time-metrics']" }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
let(:milestone) { create(:milestone, project: project) }
@@ -25,11 +27,13 @@ RSpec.describe 'Value Stream Analytics', :js do
wait_for_requests
end
- it 'shows pipeline summary' do
- expect(new_issues_counter).to have_content('-')
- expect(commits_counter).to have_content('-')
- expect(deploys_counter).to have_content('-')
- expect(deployment_frequency_counter).to have_content('-')
+ it 'displays metrics' do
+ aggregate_failures 'with relevant values' do
+ expect(new_issues_counter).to have_content('-')
+ expect(commits_counter).to have_content('-')
+ expect(deploys_counter).to have_content('-')
+ expect(deployment_frequency_counter).to have_content('-')
+ end
end
it 'shows active stage with empty message' do
@@ -45,9 +49,9 @@ RSpec.describe 'Value Stream Analytics', :js do
@build = create_cycle(user, project, issue, mr, milestone, pipeline)
deploy_master(user, project)
- issue.metrics.update!(first_mentioned_in_commit_at: issue.metrics.first_associated_with_milestone_at + 1.day)
+ issue.metrics.update!(first_mentioned_in_commit_at: issue.metrics.first_associated_with_milestone_at + 1.hour)
merge_request = issue.merge_requests_closing_issues.first.merge_request
- merge_request.update!(created_at: issue.metrics.first_associated_with_milestone_at + 1.day)
+ merge_request.update!(created_at: issue.metrics.first_associated_with_milestone_at + 1.hour)
merge_request.metrics.update!(
latest_build_started_at: 4.hours.ago,
latest_build_finished_at: 3.hours.ago,
@@ -59,11 +63,15 @@ RSpec.describe 'Value Stream Analytics', :js do
visit project_cycle_analytics_path(project)
end
- it 'shows pipeline summary' do
- expect(new_issues_counter).to have_content('1')
- expect(commits_counter).to have_content('2')
- expect(deploys_counter).to have_content('1')
- expect(deployment_frequency_counter).to have_content('0')
+ it 'displays metrics' do
+ metrics_tiles = page.find(metrics_selector)
+
+ aggregate_failures 'with relevant values' do
+ expect(metrics_tiles).to have_content('Commit')
+ expect(metrics_tiles).to have_content('Deploy')
+ expect(metrics_tiles).to have_content('Deployment Frequency')
+ expect(metrics_tiles).to have_content('New Issue')
+ end
end
it 'shows data on each stage', :sidekiq_might_not_need_inline do
@@ -76,13 +84,13 @@ RSpec.describe 'Value Stream Analytics', :js do
expect_merge_request_to_be_present
click_stage('Test')
- expect_build_to_be_present
+ expect_merge_request_to_be_present
click_stage('Review')
expect_merge_request_to_be_present
click_stage('Staging')
- expect_build_to_be_present
+ expect_merge_request_to_be_present
end
context "when I change the time period observed" do
@@ -95,7 +103,7 @@ RSpec.describe 'Value Stream Analytics', :js do
end
it 'shows only relevant data' do
- expect(new_issues_counter).to have_content('1')
+ expect(new_issue_counter).to have_content('1')
end
end
end
@@ -115,60 +123,55 @@ RSpec.describe 'Value Stream Analytics', :js do
end
it 'does not show the commit stats' do
- expect(page).to have_no_selector(:xpath, commits_counter_selector)
+ expect(page.find(metrics_selector)).not_to have_selector("#commits")
end
it 'needs permissions to see restricted stages' do
- expect(find('.stage-events')).to have_content(issue.title)
+ expect(find(stage_table_selector)).to have_content(issue.title)
click_stage('Code')
- expect(find('.stage-events')).to have_content('You need permission.')
+ expect(find(stage_table_selector)).to have_content('You need permission.')
click_stage('Review')
- expect(find('.stage-events')).to have_content('You need permission.')
+ expect(find(stage_table_selector)).to have_content('You need permission.')
end
end
- def new_issues_counter
- find(:xpath, "//p[contains(text(),'New Issue')]/preceding-sibling::h3")
+ def find_metric_tile(sel)
+ page.find("#{metrics_selector} #{sel}")
end
- def commits_counter_selector
- "//p[contains(text(),'Commits')]/preceding-sibling::h3"
+ # We now use proper pluralization for the metric names, which affects the id
+ def new_issue_counter
+ find_metric_tile("#new-issue")
end
- def commits_counter
- find(:xpath, commits_counter_selector)
+ def new_issues_counter
+ find_metric_tile("#new-issues")
end
- def deploys_counter
- find(:xpath, "//p[contains(text(),'Deploy')]/preceding-sibling::h3", match: :first)
+ def commits_counter
+ find_metric_tile("#commits")
end
- def deployment_frequency_counter_selector
- "//p[contains(text(),'Deployment Frequency')]/preceding-sibling::h3"
+ def deploys_counter
+ find_metric_tile("#deploys")
end
def deployment_frequency_counter
- find(:xpath, deployment_frequency_counter_selector)
+ find_metric_tile("#deployment-frequency")
end
def expect_issue_to_be_present
- expect(find('.stage-events')).to have_content(issue.title)
- expect(find('.stage-events')).to have_content(issue.author.name)
- expect(find('.stage-events')).to have_content("##{issue.iid}")
- end
-
- def expect_build_to_be_present
- expect(find('.stage-events')).to have_content(@build.ref)
- expect(find('.stage-events')).to have_content(@build.short_sha)
- expect(find('.stage-events')).to have_content("##{@build.id}")
+ expect(find(stage_table_selector)).to have_content(issue.title)
+ expect(find(stage_table_selector)).to have_content(issue.author.name)
+ expect(find(stage_table_selector)).to have_content("##{issue.iid}")
end
def expect_merge_request_to_be_present
- expect(find('.stage-events')).to have_content(mr.title)
- expect(find('.stage-events')).to have_content(mr.author.name)
- expect(find('.stage-events')).to have_content("!#{mr.iid}")
+ expect(find(stage_table_selector)).to have_content(mr.title)
+ expect(find(stage_table_selector)).to have_content(mr.author.name)
+ expect(find(stage_table_selector)).to have_content("!#{mr.iid}")
end
def click_stage(stage_name)
diff --git a/spec/features/dashboard/active_tab_spec.rb b/spec/features/dashboard/active_tab_spec.rb
deleted file mode 100644
index aa767d75c00..00000000000
--- a/spec/features/dashboard/active_tab_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# TODO: This entire spec file can be deleted once the combined_menu feature is fully rolled
-# out and the flag is removed, because it will then be irrelevant (there will be no more tabs).
-# Feature flag removal issue: https://gitlab.com/gitlab-org/gitlab/-/issues/324086
-RSpec.describe 'Dashboard Active Tab', :js do
- shared_examples 'combined_menu: feature flag examples' do
- before do
- sign_in(create(:user))
- end
-
- shared_examples 'page has active tab' do |title|
- it "#{title} tab" do
- subject
-
- expect(page).to have_selector('.navbar-sub-nav li.active', count: 1)
- expect(find('.navbar-sub-nav li.active')).to have_content(title)
- end
- end
-
- context 'on dashboard projects' do
- it_behaves_like 'page has active tab', 'Projects' do
- subject { visit dashboard_projects_path }
- end
- end
-
- context 'on dashboard groups' do
- it_behaves_like 'page has active tab', 'Groups' do
- subject { visit dashboard_groups_path }
- end
- end
- end
-
- context 'with combined_menu feature flag off' do
- before do
- stub_feature_flags(combined_menu: false)
- end
-
- it_behaves_like 'combined_menu: feature flag examples'
- end
-end
diff --git a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
index 3dd993b4bb5..6861fac3cc2 100644
--- a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
+++ b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
@@ -2,66 +2,44 @@
require 'spec_helper'
-RSpec.describe 'The group dashboard' do
+RSpec.describe 'The group dashboard', :js do
include ExternalAuthorizationServiceHelpers
include Spec::Support::Helpers::Features::TopNavSpecHelpers
let(:user) { create(:user) }
- shared_examples 'combined_menu: feature flag examples' do
- before do
- sign_in user
- end
-
- describe 'The top navigation' do
- it 'has all the expected links' do
- visit dashboard_groups_path
-
- open_top_nav
-
- within_top_nav do
- expect(page).to have_button('Projects')
- expect(page).to have_button('Groups')
- expect(page).to have_link('Activity')
- expect(page).to have_link('Milestones')
- expect(page).to have_link('Snippets')
- end
- end
+ before do
+ sign_in user
+ end
- it 'hides some links when an external authorization service is enabled' do
- enable_external_authorization_service_check
- visit dashboard_groups_path
+ describe 'The top navigation' do
+ it 'has all the expected links' do
+ visit dashboard_groups_path
- open_top_nav
+ open_top_nav
- within_top_nav do
- expect(page).to have_button('Projects')
- expect(page).to have_button('Groups')
- expect(page).not_to have_link('Activity')
- expect(page).not_to have_link('Milestones')
- expect(page).to have_link('Snippets')
- end
+ within_top_nav do
+ expect(page).to have_button('Projects')
+ expect(page).to have_button('Groups')
+ expect(page).to have_link('Activity')
+ expect(page).to have_link('Milestones')
+ expect(page).to have_link('Snippets')
end
end
- end
- context 'with combined_menu feature flag on', :js do
- let(:needs_rewrite_for_combined_menu_flag_on) { true }
+ it 'hides some links when an external authorization service is enabled' do
+ enable_external_authorization_service_check
+ visit dashboard_groups_path
- before do
- stub_feature_flags(combined_menu: true)
- end
-
- it_behaves_like 'combined_menu: feature flag examples'
- end
+ open_top_nav
- context 'with combined_menu feature flag off' do
- let(:needs_rewrite_for_combined_menu_flag_on) { false }
-
- before do
- stub_feature_flags(combined_menu: false)
+ within_top_nav do
+ expect(page).to have_button('Projects')
+ expect(page).to have_button('Groups')
+ expect(page).not_to have_link('Activity')
+ expect(page).not_to have_link('Milestones')
+ expect(page).to have_link('Snippets')
+ end
end
-
- it_behaves_like 'combined_menu: feature flag examples'
end
end
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 1f0981de7e1..27419479479 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -194,6 +194,29 @@ RSpec.describe 'Dashboard Projects' do
end
end
+ describe 'with topics' do
+ context 'when project has topics' do
+ before do
+ project.update_attribute(:topic_list, 'topic1')
+ end
+
+ it 'shows project topics if exist' do
+ visit dashboard_projects_path
+
+ expect(page).to have_selector('[data-testid="project_topic_list"]')
+ expect(page).to have_link('topic1', href: explore_projects_path(topic: 'topic1'))
+ end
+ end
+
+ context 'when project does not have topics' do
+ it 'does not show project topics' do
+ visit dashboard_projects_path
+
+ expect(page).not_to have_selector('[data-testid="project_topic_list"]')
+ end
+ end
+ end
+
context 'last push widget', :use_clean_rails_memory_store_caching do
before do
event = create(:push_event, project: project, author: user)
diff --git a/spec/features/dashboard/shortcuts_spec.rb b/spec/features/dashboard/shortcuts_spec.rb
index 7439bfd334b..3f3ab4218f2 100644
--- a/spec/features/dashboard/shortcuts_spec.rb
+++ b/spec/features/dashboard/shortcuts_spec.rb
@@ -3,89 +3,71 @@
require 'spec_helper'
RSpec.describe 'Dashboard shortcuts', :js do
- shared_examples 'combined_menu: feature flag examples' do
- context 'logged in' do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
+ context 'logged in' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
- before do
- project.add_developer(user)
- sign_in(user)
- visit root_dashboard_path
- end
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ visit root_dashboard_path
+ end
- it 'navigate to tabs' do
- find('body').send_keys([:shift, 'I'])
+ it 'navigate to tabs' do
+ find('body').send_keys([:shift, 'I'])
- check_page_title('Issues')
+ check_page_title('Issues')
- find('body').send_keys([:shift, 'M'])
+ find('body').send_keys([:shift, 'M'])
- check_page_title('Merge requests')
+ check_page_title('Merge requests')
- find('body').send_keys([:shift, 'T'])
+ find('body').send_keys([:shift, 'T'])
- check_page_title('To-Do List')
+ check_page_title('To-Do List')
- find('body').send_keys([:shift, 'G'])
+ find('body').send_keys([:shift, 'G'])
- check_page_title('Groups')
+ check_page_title('Groups')
- find('body').send_keys([:shift, 'P'])
+ find('body').send_keys([:shift, 'P'])
- check_page_title('Projects')
+ check_page_title('Projects')
- find('body').send_keys([:shift, 'A'])
+ find('body').send_keys([:shift, 'A'])
- check_page_title('Activity')
+ check_page_title('Activity')
- find('body').send_keys([:shift, 'L'])
+ find('body').send_keys([:shift, 'L'])
- check_page_title('Milestones')
- end
+ check_page_title('Milestones')
end
+ end
- context 'logged out' do
- before do
- visit explore_root_path
- end
-
- it 'navigate to tabs' do
- find('body').send_keys([:shift, 'G'])
+ context 'logged out' do
+ before do
+ visit explore_root_path
+ end
- find('.nothing-here-block')
- expect(page).to have_content('No public groups')
+ it 'navigate to tabs' do
+ find('body').send_keys([:shift, 'G'])
- find('body').send_keys([:shift, 'S'])
+ find('.nothing-here-block')
+ expect(page).to have_content('No public groups')
- find('.nothing-here-block')
- expect(page).to have_content('No snippets found')
+ find('body').send_keys([:shift, 'S'])
- find('body').send_keys([:shift, 'P'])
+ find('.nothing-here-block')
+ expect(page).to have_content('No snippets found')
- find('.nothing-here-block')
- expect(page).to have_content('Explore public groups to find projects to contribute to.')
- end
- end
+ find('body').send_keys([:shift, 'P'])
- def check_page_title(title)
- expect(find('.page-title')).to have_content(title)
+ find('.nothing-here-block')
+ expect(page).to have_content('Explore public groups to find projects to contribute to.')
end
end
- context 'with combined_menu feature flag on' do
- before do
- stub_feature_flags(combined_menu: true)
- end
-
- it_behaves_like 'combined_menu: feature flag examples'
- end
-
- context 'with combined_menu feature flag off' do
- before do
- stub_feature_flags(combined_menu: false)
- end
-
- it_behaves_like 'combined_menu: feature flag examples'
+ def check_page_title(title)
+ expect(find('.page-title')).to have_content(title)
end
end
diff --git a/spec/features/frequently_visited_projects_and_groups_spec.rb b/spec/features/frequently_visited_projects_and_groups_spec.rb
index 5ea42ce39e3..6bc3b745851 100644
--- a/spec/features/frequently_visited_projects_and_groups_spec.rb
+++ b/spec/features/frequently_visited_projects_and_groups_spec.rb
@@ -7,67 +7,45 @@ RSpec.describe 'Frequently visited items', :js do
let_it_be(:user) { create(:user) }
- shared_examples 'combined_menu: feature flag examples' do
- before do
- sign_in(user)
- end
-
- context 'for projects' do
- let_it_be(:project) { create(:project, :public) }
+ before do
+ sign_in(user)
+ end
- it 'increments localStorage counter when visiting the project' do
- visit project_path(project)
- open_top_nav_projects
+ context 'for projects' do
+ let_it_be(:project) { create(:project, :public) }
- frequent_projects = nil
+ it 'increments localStorage counter when visiting the project' do
+ visit project_path(project)
+ open_top_nav_projects
- wait_for('localStorage frequent-projects') do
- frequent_projects = page.evaluate_script("localStorage['#{user.username}/frequent-projects']")
+ frequent_projects = nil
- frequent_projects.present?
- end
+ wait_for('localStorage frequent-projects') do
+ frequent_projects = page.evaluate_script("localStorage['#{user.username}/frequent-projects']")
- expect(Gitlab::Json.parse(frequent_projects)).to contain_exactly(a_hash_including('id' => project.id, 'frequency' => 1))
+ frequent_projects.present?
end
- end
-
- context 'for groups' do
- let_it_be(:group) { create(:group, :public) }
-
- it 'increments localStorage counter when visiting the group' do
- visit group_path(group)
- open_top_nav_groups
-
- frequent_groups = nil
-
- wait_for('localStorage frequent-groups') do
- frequent_groups = page.evaluate_script("localStorage['#{user.username}/frequent-groups']")
- frequent_groups.present?
- end
-
- expect(Gitlab::Json.parse(frequent_groups)).to contain_exactly(a_hash_including('id' => group.id, 'frequency' => 1))
- end
+ expect(Gitlab::Json.parse(frequent_projects)).to contain_exactly(a_hash_including('id' => project.id, 'frequency' => 1))
end
end
- context 'with combined_menu feature flag on' do
- let(:needs_rewrite_for_combined_menu_flag_on) { true }
+ context 'for groups' do
+ let_it_be(:group) { create(:group, :public) }
- before do
- stub_feature_flags(combined_menu: true)
- end
+ it 'increments localStorage counter when visiting the group' do
+ visit group_path(group)
+ open_top_nav_groups
- it_behaves_like 'combined_menu: feature flag examples'
- end
+ frequent_groups = nil
- context 'with combined_menu feature flag off' do
- let(:needs_rewrite_for_combined_menu_flag_on) { false }
+ wait_for('localStorage frequent-groups') do
+ frequent_groups = page.evaluate_script("localStorage['#{user.username}/frequent-groups']")
- before do
- stub_feature_flags(combined_menu: false)
- end
+ frequent_groups.present?
+ end
- it_behaves_like 'combined_menu: feature flag examples'
+ expect(Gitlab::Json.parse(frequent_groups)).to contain_exactly(a_hash_including('id' => group.id, 'frequency' => 1))
+ end
end
end
diff --git a/spec/features/groups/board_sidebar_spec.rb b/spec/features/groups/board_sidebar_spec.rb
index 690d661ba2f..e2dd2fecab7 100644
--- a/spec/features/groups/board_sidebar_spec.rb
+++ b/spec/features/groups/board_sidebar_spec.rb
@@ -19,8 +19,6 @@ RSpec.describe 'Group Issue Boards', :js do
let(:card) { find('.board:nth-child(1)').first('.board-card') }
before do
- # stubbing until sidebar work is done: https://gitlab.com/gitlab-org/gitlab/-/issues/230711
- stub_feature_flags(graphql_board_lists: false)
sign_in(user)
visit group_board_path(group, board)
@@ -32,6 +30,32 @@ RSpec.describe 'Group Issue Boards', :js do
click_card(card)
page.within('.labels') do
+ click_button 'Edit'
+
+ wait_for_requests
+
+ page.within('[data-testid="dropdown-content"]') do
+ expect(page).to have_content(project_1_label.title)
+ expect(page).to have_content(group_label.title)
+ expect(page).not_to have_content(project_2_label.title)
+ end
+ end
+ end
+ end
+
+ context 'when graphql_board_lists FF disabled' do
+ before do
+ stub_feature_flags(graphql_board_lists: false)
+ sign_in(user)
+
+ visit group_board_path(group, board)
+ wait_for_requests
+ end
+
+ it 'only shows valid labels for the issue project and group' do
+ click_card(card)
+
+ page.within('.labels') do
click_link 'Edit'
wait_for_requests
diff --git a/spec/features/groups/board_spec.rb b/spec/features/groups/board_spec.rb
index b4c60ff4fa3..afe36dabcb5 100644
--- a/spec/features/groups/board_spec.rb
+++ b/spec/features/groups/board_spec.rb
@@ -3,16 +3,21 @@
require 'spec_helper'
RSpec.describe 'Group Boards' do
- let(:group) { create(:group) }
- let!(:project) { create(:project_empty_repo, group: group) }
- let(:user) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
+ include DragTo
+ include MobileHelpers
+ include BoardHelpers
- before do
- sign_in(user)
- end
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ context 'Creates an issue', :js do
+ let_it_be(:project) { create(:project_empty_repo, group: group) }
- context 'Creates a an issue', :js do
before do
+ group.add_maintainer(user)
+
+ sign_in(user)
+
visit group_boards_path(group)
end
@@ -39,4 +44,58 @@ RSpec.describe 'Group Boards' do
end
end
end
+
+ context "when user is a Reporter in one of the group's projects", :js do
+ let_it_be(:board) { create(:board, group: group) }
+
+ let_it_be(:backlog_list) { create(:backlog_list, board: board) }
+ let_it_be(:group_label1) { create(:group_label, title: "bug", group: group) }
+ let_it_be(:group_label2) { create(:group_label, title: "dev", group: group) }
+ let_it_be(:list1) { create(:list, board: board, label: group_label1, position: 0) }
+ let_it_be(:list2) { create(:list, board: board, label: group_label2, position: 1) }
+
+ let_it_be(:project1) { create(:project_empty_repo, :private, group: group) }
+ let_it_be(:project2) { create(:project_empty_repo, :private, group: group) }
+ let_it_be(:issue1) { create(:issue, title: 'issue1', project: project1, labels: [group_label2]) }
+ let_it_be(:issue2) { create(:issue, title: 'issue2', project: project2) }
+
+ before do
+ project1.add_guest(user)
+ project2.add_reporter(user)
+
+ sign_in(user)
+
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ visit group_boards_path(group)
+ end
+ end
+
+ it 'allows user to move issue of project where they are a Reporter' do
+ expect(find('.board:nth-child(1)')).to have_content(issue2.title)
+
+ drag(list_from_index: 0, from_index: 0, list_to_index: 1)
+
+ expect(find('.board:nth-child(2)')).to have_content(issue2.title)
+ expect(issue2.reload.labels).to contain_exactly(group_label1)
+ end
+
+ it 'does not allow user to move issue of project where they are a Guest' do
+ expect(find('.board:nth-child(3)')).to have_content(issue1.title)
+
+ drag(list_from_index: 2, from_index: 0, list_to_index: 1)
+
+ expect(find('.board:nth-child(3)')).to have_content(issue1.title)
+ expect(issue1.reload.labels).to contain_exactly(group_label2)
+ expect(issue2.reload.labels).to eq([])
+ end
+
+ it 'does not allow user to re-position lists' do
+ drag(list_from_index: 1, list_to_index: 2, selector: '.board-header')
+
+ expect(find('.board:nth-child(2) [data-testid="board-list-header"]')).to have_content(group_label1.title)
+ expect(find('.board:nth-child(3) [data-testid="board-list-header"]')).to have_content(group_label2.title)
+ expect(list1.reload.position).to eq(0)
+ expect(list2.reload.position).to eq(1)
+ end
+ end
end
diff --git a/spec/features/groups/integrations/user_activates_mattermost_slash_command_spec.rb b/spec/features/groups/integrations/user_activates_mattermost_slash_command_spec.rb
index 7703268af39..02aa418cd73 100644
--- a/spec/features/groups/integrations/user_activates_mattermost_slash_command_spec.rb
+++ b/spec/features/groups/integrations/user_activates_mattermost_slash_command_spec.rb
@@ -13,4 +13,9 @@ RSpec.describe 'User activates the group-level Mattermost Slash Command integrat
let(:edit_path) { edit_group_settings_integration_path(group, :mattermost_slash_commands) }
include_examples 'user activates the Mattermost Slash Command integration'
+
+ it 'does not display the overrides tab' do
+ expect(page).not_to have_link('Settings', href: edit_path)
+ expect(page).not_to have_link('Projects using custom settings', href: overrides_admin_application_settings_integration_path(:mattermost_slash_commands))
+ end
end
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index 1d57d0a9103..38e829bafcc 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -84,6 +84,33 @@ RSpec.describe 'Groups > Members > Manage members' do
property: 'existing_user',
user: user1
)
+ expect_no_snowplow_event(
+ category: 'Members::CreateService',
+ action: 'area_of_focus'
+ )
+ end
+
+ it 'adds a user to group with area_of_focus', :js, :snowplow, :aggregate_failures do
+ stub_experiments(member_areas_of_focus: :candidate)
+ group.add_owner(user1)
+
+ visit group_group_members_path(group)
+
+ invite_member(user2.name, role: 'Reporter', area_of_focus: true)
+ wait_for_requests
+
+ expect_snowplow_event(
+ category: 'Members::CreateService',
+ action: 'area_of_focus',
+ label: 'Contribute to the codebase',
+ property: group.members.last.id.to_s
+ )
+ expect_snowplow_event(
+ category: 'Members::CreateService',
+ action: 'area_of_focus',
+ label: 'Collaborate on open issues and merge requests',
+ property: group.members.last.id.to_s
+ )
end
it 'do not disclose email addresses', :js do
@@ -193,9 +220,36 @@ RSpec.describe 'Groups > Members > Manage members' do
property: 'net_new_user',
user: user1
)
+ expect_no_snowplow_event(
+ category: 'Members::CreateService',
+ action: 'area_of_focus'
+ )
end
end
+ it 'invite user to group with area_of_focus', :js, :snowplow, :aggregate_failures do
+ stub_experiments(member_areas_of_focus: :candidate)
+ group.add_owner(user1)
+
+ visit group_group_members_path(group)
+
+ invite_member('test@example.com', role: 'Reporter', area_of_focus: true)
+ wait_for_requests
+
+ expect_snowplow_event(
+ category: 'Members::InviteService',
+ action: 'area_of_focus',
+ label: 'Contribute to the codebase',
+ property: group.members.last.id.to_s
+ )
+ expect_snowplow_event(
+ category: 'Members::InviteService',
+ action: 'area_of_focus',
+ label: 'Collaborate on open issues and merge requests',
+ property: group.members.last.id.to_s
+ )
+ end
+
context 'when user is a guest' do
before do
group.add_guest(user1)
diff --git a/spec/features/groups/packages_spec.rb b/spec/features/groups/packages_spec.rb
index 752303fdd78..9a7950266a5 100644
--- a/spec/features/groups/packages_spec.rb
+++ b/spec/features/groups/packages_spec.rb
@@ -52,6 +52,8 @@ RSpec.describe 'Group Packages' do
it_behaves_like 'package details link'
end
+ it_behaves_like 'package details link'
+
it 'allows you to navigate to the project page' do
find('[data-testid="root-link"]', text: project.name).click
diff --git a/spec/features/groups/settings/manage_applications_spec.rb b/spec/features/groups/settings/manage_applications_spec.rb
new file mode 100644
index 00000000000..5f84f61678d
--- /dev/null
+++ b/spec/features/groups/settings/manage_applications_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User manages applications' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:new_application_path) { group_settings_applications_path(group) }
+
+ before do
+ group.add_owner(user)
+ sign_in(user)
+ end
+
+ include_examples 'manage applications'
+end
diff --git a/spec/features/groups/settings/packages_and_registries_spec.rb b/spec/features/groups/settings/packages_and_registries_spec.rb
index 551a0bc5375..835555480dd 100644
--- a/spec/features/groups/settings/packages_and_registries_spec.rb
+++ b/spec/features/groups/settings/packages_and_registries_spec.rb
@@ -113,7 +113,7 @@ RSpec.describe 'Group Packages & Registries settings' do
end
def find_settings_menu
- find('ul[data-testid="group-settings-menu"]')
+ find('.shortcuts-settings ul')
end
def visit_settings_page
diff --git a/spec/features/groups/settings/user_searches_in_settings_spec.rb b/spec/features/groups/settings/user_searches_in_settings_spec.rb
index c258dd41b03..abf56232aff 100644
--- a/spec/features/groups/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/groups/settings/user_searches_in_settings_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe 'User searches group settings', :js do
visit group_settings_ci_cd_path(group)
end
- it_behaves_like 'can search settings', 'Variables', 'Runners'
+ it_behaves_like 'can search settings', 'Variables', 'Auto DevOps'
end
context 'in Packages & Registries page' do
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index efde570512f..9c11b84fa8f 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe 'Group' do
click_button 'Create group'
expect(current_path).to eq(new_group_path)
- expect(page).to have_text('Please choose a group URL with no special characters.')
+ expect(page).to have_text('Please choose a group URL with no special characters or spaces.')
end
end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index b7e1004aef5..d56bedd4852 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -141,6 +141,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
let(:invite_email) { new_user.email }
let(:group_invite) { create(:group_member, :invited, group: group, invite_email: invite_email, created_by: owner) }
let(:send_email_confirmation) { true }
+ let(:extra_params) { { invite_type: Emails::Members::INITIAL_INVITE } }
before do
stub_application_setting(send_user_confirmation_email: send_email_confirmation)
@@ -148,7 +149,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
context 'when registering using invitation email' do
before do
- visit invite_path(group_invite.raw_invite_token, invite_type: Members::InviteEmailExperiment::INVITE_TYPE)
+ visit invite_path(group_invite.raw_invite_token, extra_params)
end
context 'with admin approval required enabled' do
@@ -188,11 +189,28 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
context 'email confirmation enabled' do
- context 'with members/invite_email experiment', :experiment do
+ context 'with invite email acceptance', :snowplow do
it 'tracks the accepted invite' do
- expect(experiment('members/invite_email')).to track(:accepted)
- .with_context(actor: group_invite)
- .on_next_instance
+ fill_in_sign_up_form(new_user)
+
+ expect_snowplow_event(
+ category: 'RegistrationsController',
+ action: 'accepted',
+ label: 'invite_email',
+ property: group_invite.id.to_s
+ )
+ end
+ end
+
+ context 'with invite email acceptance for the invite_email_preview_text experiment', :experiment do
+ let(:extra_params) do
+ { invite_type: Emails::Members::INITIAL_INVITE, experiment_name: 'invite_email_preview_text' }
+ end
+
+ it 'tracks the accepted invite' do
+ expect(experiment(:invite_email_preview_text)).to track(:accepted)
+ .with_context(actor: group_invite)
+ .on_next_instance
fill_in_sign_up_form(new_user)
end
diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
index a4c0a84af7d..077c363f78b 100644
--- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
context 'resolving the thread' do
before do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
end
it 'hides the link for creating a new issue' do
diff --git a/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
index ac3471e8401..3ff8fc5ecca 100644
--- a/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe 'Resolve an open thread in a merge request by creating an issue',
context 'resolving the thread' do
before do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
end
it 'hides the link for creating a new issue' do
diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb
index 5ca20028485..4bad67acc87 100644
--- a/spec/features/issues/form_spec.rb
+++ b/spec/features/issues/form_spec.rb
@@ -6,13 +6,13 @@ RSpec.describe 'New/edit issue', :js do
include ActionView::Helpers::JavaScriptHelper
include FormHelper
- let!(:project) { create(:project) }
- let!(:user) { create(:user)}
- let!(:user2) { create(:user)}
- let!(:milestone) { create(:milestone, project: project) }
- let!(:label) { create(:label, project: project) }
- let!(:label2) { create(:label, project: project) }
- let!(:issue) { create(:issue, project: project, assignees: [user], milestone: milestone) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user)}
+ let_it_be(:user2) { create(:user)}
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:label2) { create(:label, project: project) }
+ let_it_be(:issue) { create(:issue, project: project, assignees: [user], milestone: milestone) }
before do
stub_licensed_features(multiple_issue_assignees: false, issue_weights: false)
@@ -234,6 +234,28 @@ RSpec.describe 'New/edit issue', :js do
expect(page).to have_selector('.atwho-view')
end
+ describe 'displays issue type options in the dropdown' do
+ before do
+ page.within('.issue-form') do
+ click_button 'Issue'
+ end
+ end
+
+ it 'correctly displays the Issue type option with an icon', :aggregate_failures do
+ page.within('[data-testid="issue-type-select-dropdown"]') do
+ expect(page).to have_selector('[data-testid="issue-type-issue-icon"]')
+ expect(page).to have_content('Issue')
+ end
+ end
+
+ it 'correctly displays the Incident type option with an icon', :aggregate_failures do
+ page.within('[data-testid="issue-type-select-dropdown"]') do
+ expect(page).to have_selector('[data-testid="issue-type-incident-icon"]')
+ expect(page).to have_content('Incident')
+ end
+ end
+ end
+
describe 'milestone' do
let!(:milestone) { create(:milestone, title: '">&lt;img src=x onerror=alert(document.domain)&gt;', project: project) }
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index 0e2ef5cc6eb..e198d9d4ebb 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -408,7 +408,7 @@ RSpec.describe 'Issue Sidebar' do
context 'sidebar', :js do
it 'finds issue copy forwarding email' do
- expect(find('[data-qa-selector="copy-forward-email"]').text).to eq "Issue email: #{issue.creatable_note_email_address(user)}"
+ expect(find('[data-qa-selector="copy-forward-email"]').text).to eq "Issue email: #{issue.creatable_note_email_address(user)}" # rubocop:disable QA/SelectorUsage
end
end
@@ -444,7 +444,7 @@ RSpec.describe 'Issue Sidebar' do
end
it 'does not find issue email' do
- expect(page).not_to have_selector('[data-qa-selector="copy-forward-email"]')
+ expect(page).not_to have_selector('[data-qa-selector="copy-forward-email"]') # rubocop:disable QA/SelectorUsage
end
end
end
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index 4a77e850d51..f46aa5c21b6 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -182,7 +182,7 @@ RSpec.describe "User creates issue" do
end
it 'does not hide the milestone select' do
- expect(page).to have_selector('.qa-issuable-milestone-dropdown')
+ expect(page).to have_selector('.qa-issuable-milestone-dropdown') # rubocop:disable QA/SelectorUsage
end
end
@@ -202,11 +202,11 @@ RSpec.describe "User creates issue" do
end
it 'shows the milestone select' do
- expect(page).to have_selector('.qa-issuable-milestone-dropdown')
+ expect(page).to have_selector('.qa-issuable-milestone-dropdown') # rubocop:disable QA/SelectorUsage
end
it 'hides the weight input' do
- expect(page).not_to have_selector('.qa-issuable-weight-input')
+ expect(page).not_to have_selector('.qa-issuable-weight-input') # rubocop:disable QA/SelectorUsage
end
it 'shows the incident help text' do
diff --git a/spec/features/jira_connect/branches_spec.rb b/spec/features/jira_connect/branches_spec.rb
new file mode 100644
index 00000000000..6fa600c6906
--- /dev/null
+++ b/spec/features/jira_connect/branches_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Create GitLab branches from Jira', :js do
+ let_it_be(:alice) { create(:user, name: 'Alice') }
+ let_it_be(:bob) { create(:user, name: 'Bob') }
+
+ let_it_be(:project1) { create(:project, :repository, namespace: alice.namespace, title: 'foo') }
+ let_it_be(:project2) { create(:project, :repository, namespace: alice.namespace, title: 'bar') }
+ let_it_be(:project3) { create(:project, namespace: bob.namespace) }
+
+ let(:source_branch) { 'my-source-branch' }
+ let(:new_branch) { 'my-new-branch' }
+
+ before do
+ project2.repository.add_branch(alice, source_branch, 'master')
+ sign_in(alice)
+ end
+
+ def within_dropdown(&block)
+ within('.dropdown-menu', &block)
+ end
+
+ it 'select project and branch and submit the form' do
+ visit new_jira_connect_branch_path(issue_key: 'ACME-123', issue_summary: 'My issue !@#$% title')
+
+ expect(page).to have_field('Branch name', with: 'ACME-123-my-issue-title')
+ expect(page).to have_button('Create branch', disabled: true)
+
+ # Select project1
+
+ click_on 'Select a project'
+
+ within_dropdown do
+ expect(page).to have_text('Alice / foo')
+ expect(page).to have_text('Alice / bar')
+ expect(page).not_to have_text('Bob /')
+
+ fill_in 'Search', with: 'foo'
+
+ expect(page).not_to have_text('Alice / bar')
+
+ click_on 'Alice / foo'
+ end
+
+ expect(page).to have_button('Create branch', disabled: false)
+
+ click_on 'master'
+
+ within_dropdown do
+ fill_in 'Search', with: source_branch
+
+ expect(page).not_to have_text(source_branch)
+
+ fill_in 'Search', with: 'master'
+
+ expect(page).to have_text('master')
+ end
+
+ # Switch to project2
+
+ click_on 'Alice / foo'
+
+ within_dropdown do
+ fill_in 'Search', with: ''
+ click_on 'Alice / bar'
+ end
+
+ click_on 'master'
+
+ within_dropdown do
+ fill_in 'Search', with: source_branch
+ click_on source_branch
+ end
+
+ fill_in 'Branch name', with: new_branch
+ click_on 'Create branch'
+
+ expect(page).to have_text('New branch was successfully created. You can now close this window and return to Jira.')
+
+ expect(project1.commit(new_branch)).to be_nil
+ expect(project2.commit(new_branch)).not_to be_nil
+ expect(project2.commit(new_branch)).to eq(project2.commit(source_branch))
+ end
+end
diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb
index 0a2f81986be..fca5e946d0c 100644
--- a/spec/features/labels_hierarchy_spec.rb
+++ b/spec/features/labels_hierarchy_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe 'Labels Hierarchy', :js do
let!(:project_label_1) { create(:label, project: project_1, title: 'Label_4') }
before do
- stub_feature_flags(graphql_board_lists: false)
stub_feature_flags(board_new_list: false)
grandparent.add_owner(user)
@@ -25,20 +24,21 @@ RSpec.describe 'Labels Hierarchy', :js do
end
shared_examples 'assigning labels from sidebar' do
- it 'can assign all ancestors labels', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/27952' do
+ it 'can assign all ancestors labels' do
[grandparent_group_label, parent_group_label, project_label_1].each do |label|
page.within('.block.labels') do
- find('.edit-link').click
+ click_on 'Edit'
end
wait_for_requests
find('a.label-item', text: label.title).click
- find('.dropdown-menu-close-icon').click
+ wait_for_requests
+ click_on 'Close'
wait_for_requests
- expect(page).to have_selector('.badge', text: label.title)
+ expect(page).to have_selector('.gl-label', text: label.title)
end
end
@@ -215,6 +215,44 @@ RSpec.describe 'Labels Hierarchy', :js do
end
end
+ context 'issuable sidebar when graphql_board_lists FF disabled' do
+ let!(:issue) { create(:issue, project: project_1) }
+
+ before do
+ stub_feature_flags(graphql_board_lists: false)
+ end
+
+ context 'on project board issue sidebar' do
+ before do
+ project_1.add_developer(user)
+ board = create(:board, project: project_1)
+
+ visit project_board_path(project_1, board)
+
+ wait_for_requests
+
+ find('.board-card').click
+ end
+
+ it_behaves_like 'assigning labels from sidebar'
+ end
+
+ context 'on group board issue sidebar' do
+ before do
+ parent.add_developer(user)
+ board = create(:board, group: parent)
+
+ visit group_board_path(parent, board)
+
+ wait_for_requests
+
+ find('.board-card').click
+ end
+
+ it_behaves_like 'assigning labels from sidebar'
+ end
+ end
+
context 'issuable filtering' do
let!(:labeled_issue) { create(:labeled_issue, project: project_1, labels: [grandparent_group_label, parent_group_label, project_label_1]) }
let!(:issue) { create(:issue, project: project_1) }
@@ -302,6 +340,34 @@ RSpec.describe 'Labels Hierarchy', :js do
let(:board) { create(:board, group: parent) }
before do
+ parent.add_developer(user)
+ visit group_board_path(parent, board)
+ find('.js-new-board-list').click
+ wait_for_requests
+ end
+
+ context 'when graphql_board_lists FF enabled' do
+ it 'creates lists from all ancestor group labels' do
+ [grandparent_group_label, parent_group_label].each do |label|
+ find('a', text: label.title).click
+ end
+
+ wait_for_requests
+
+ expect(page).to have_selector('.board-title-text', text: grandparent_group_label.title)
+ expect(page).to have_selector('.board-title-text', text: parent_group_label.title)
+ end
+
+ it 'does not create lists from descendant groups' do
+ expect(page).not_to have_selector('a', text: child_group_label.title)
+ end
+ end
+ end
+
+ context 'when graphql_board_lists FF disabled' do
+ let(:board) { create(:board, group: parent) }
+
+ before do
stub_feature_flags(graphql_board_lists: false)
parent.add_developer(user)
visit group_board_path(parent, board)
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
index 9e1b0135932..af5ba14e310 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
context 'when enabled after it was previously canceled' do
before do
click_button "Merge when pipeline succeeds"
- click_link "Cancel"
+ click_button "Cancel auto-merge"
wait_for_requests
@@ -87,7 +87,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
before do
merge_request.merge_params['force_remove_source_branch'] = '0'
merge_request.save!
- click_link "Cancel"
+ click_button "Cancel auto-merge"
end
it_behaves_like 'Merge when pipeline succeeds activator'
@@ -114,7 +114,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
end
it 'allows to cancel the automatic merge' do
- click_link "Cancel"
+ click_button "Cancel auto-merge"
expect(page).to have_button "Merge when pipeline succeeds"
@@ -124,7 +124,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
end
it 'allows to delete source branch' do
- click_link "Delete source branch"
+ click_button "Delete source branch"
expect(page).to have_content "The source branch will be deleted"
end
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index 73e628bda98..8343e04aef1 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to mark thread as resolved' do
page.within '.diff-content' do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
end
expect(page).to have_selector('.discussion-body', visible: false)
@@ -80,7 +80,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to unresolve thread' do
page.within '.diff-content' do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
click_button 'Unresolve thread'
end
@@ -92,7 +92,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
describe 'resolved thread' do
before do
page.within '.diff-content' do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
end
visit_merge_request
@@ -193,7 +193,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to resolve from reply form without a comment' do
page.within '.diff-content' do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
end
page.within '.line-resolve-all-container' do
@@ -230,7 +230,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'hides jump to next button when all resolved' do
page.within '.diff-content' do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
end
expect(page).to have_selector('.discussion-next-btn', visible: false)
@@ -326,7 +326,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to mark all threads as resolved' do
page.all('.discussion-reply-holder', count: 2).each do |reply_holder|
page.within reply_holder do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
end
end
@@ -338,7 +338,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to quickly scroll to next unresolved thread' do
page.within('.discussion-reply-holder', match: :first) do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
end
page.within '.line-resolve-all-container' do
@@ -410,7 +410,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to mark thread as resolved' do
page.within '.diff-content' do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
end
page.within '.diff-content .note' do
@@ -425,7 +425,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to unresolve thread' do
page.within '.diff-content' do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
click_button 'Unresolve thread'
end
@@ -453,7 +453,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to comment & unresolve thread' do
page.within '.diff-content' do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
find_field('Reply…').click
diff --git a/spec/features/merge_request/user_sees_closing_issues_message_spec.rb b/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
index d6cdc15005b..7b7fff5c936 100644
--- a/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
+++ b/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe 'Merge request > User sees closing issues message', :js do
let(:merge_request_description) { "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}" }
it 'does not display closing issue message' do
- expect(page).to have_content("Closes #{issue_1.to_reference} and #{issue_2.to_reference}")
+ expect(page).to have_content("Closes issues #{issue_1.to_reference} and #{issue_2.to_reference}")
end
end
@@ -39,7 +39,7 @@ RSpec.describe 'Merge request > User sees closing issues message', :js do
let(:merge_request_description) { "Description\n\nRefers to #{issue_1.to_reference} and #{issue_2.to_reference}" }
it 'does not display closing issue message' do
- expect(page).to have_content("Mentions #{issue_1.to_reference} and #{issue_2.to_reference}")
+ expect(page).to have_content("Mentions issues #{issue_1.to_reference} and #{issue_2.to_reference}")
end
end
@@ -47,8 +47,8 @@ RSpec.describe 'Merge request > User sees closing issues message', :js do
let(:merge_request_title) { "closes #{issue_1.to_reference}\n\n refers to #{issue_2.to_reference}" }
it 'does not display closing issue message' do
- expect(page).to have_content("Closes #{issue_1.to_reference}")
- expect(page).to have_content("Mentions #{issue_2.to_reference}")
+ expect(page).to have_content("Closes issue #{issue_1.to_reference}")
+ expect(page).to have_content("Mentions issue #{issue_2.to_reference}")
end
end
@@ -56,7 +56,7 @@ RSpec.describe 'Merge request > User sees closing issues message', :js do
let(:merge_request_title) { "closing #{issue_1.to_reference}, #{issue_2.to_reference}" }
it 'does not display closing issue message' do
- expect(page).to have_content("Closes #{issue_1.to_reference} and #{issue_2.to_reference}")
+ expect(page).to have_content("Closes issues #{issue_1.to_reference} and #{issue_2.to_reference}")
end
end
@@ -64,7 +64,7 @@ RSpec.describe 'Merge request > User sees closing issues message', :js do
let(:merge_request_title) { "Refers to #{issue_1.to_reference} and #{issue_2.to_reference}" }
it 'does not display closing issue message' do
- expect(page).to have_content("Mentions #{issue_1.to_reference} and #{issue_2.to_reference}")
+ expect(page).to have_content("Mentions issues #{issue_1.to_reference} and #{issue_2.to_reference}")
end
end
@@ -72,8 +72,8 @@ RSpec.describe 'Merge request > User sees closing issues message', :js do
let(:merge_request_title) { "closes #{issue_1.to_reference}\n\n refers to #{issue_2.to_reference}" }
it 'does not display closing issue message' do
- expect(page).to have_content("Closes #{issue_1.to_reference}")
- expect(page).to have_content("Mentions #{issue_2.to_reference}")
+ expect(page).to have_content("Closes issue #{issue_1.to_reference}")
+ expect(page).to have_content("Mentions issue #{issue_2.to_reference}")
end
end
end
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index 85eb956033b..2a49109d360 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -43,12 +43,14 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
let!(:push_pipeline) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
- .execute(:push)
+ .execute(:push)
+ .payload
end
let!(:detached_merge_request_pipeline) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
- .execute(:merge_request_event, merge_request: merge_request)
+ .execute(:merge_request_event, merge_request: merge_request)
+ .payload
end
before do
@@ -77,12 +79,14 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when a user updated a merge request in the parent project', :sidekiq_might_not_need_inline do
let!(:push_pipeline_2) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
- .execute(:push)
+ .execute(:push)
+ .payload
end
let!(:detached_merge_request_pipeline_2) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
- .execute(:merge_request_event, merge_request: merge_request)
+ .execute(:merge_request_event, merge_request: merge_request)
+ .payload
end
before do
@@ -147,7 +151,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when detached merge request pipeline is pending' do
it 'waits the head pipeline' do
expect(page).to have_content('to be merged automatically when the pipeline succeeds')
- expect(page).to have_link('Cancel')
+ expect(page).to have_button('Cancel auto-merge')
end
end
@@ -174,7 +178,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'waits the head pipeline' do
expect(page).to have_content('to be merged automatically when the pipeline succeeds')
- expect(page).to have_link('Cancel')
+ expect(page).to have_button('Cancel auto-merge')
end
end
end
@@ -222,12 +226,14 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
let!(:push_pipeline) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
- .execute(:push)
+ .execute(:push)
+ .payload
end
let!(:detached_merge_request_pipeline) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
- .execute(:merge_request_event, merge_request: merge_request)
+ .execute(:merge_request_event, merge_request: merge_request)
+ .payload
end
let(:forked_project) { fork_project(project, user2, repository: true) }
@@ -267,12 +273,14 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when a user updated a merge request from a forked project to the parent project' do
let!(:push_pipeline_2) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
- .execute(:push)
+ .execute(:push)
+ .payload
end
let!(:detached_merge_request_pipeline_2) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
- .execute(:merge_request_event, merge_request: merge_request)
+ .execute(:merge_request_event, merge_request: merge_request)
+ .payload
end
before do
@@ -369,7 +377,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when detached merge request pipeline is pending' do
it 'waits the head pipeline' do
expect(page).to have_content('to be merged automatically when the pipeline succeeds')
- expect(page).to have_link('Cancel')
+ expect(page).to have_button('Cancel auto-merge')
end
end
@@ -395,7 +403,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'waits the head pipeline' do
expect(page).to have_content('to be merged automatically when the pipeline succeeds')
- expect(page).to have_link('Cancel')
+ expect(page).to have_button('Cancel auto-merge')
end
end
end
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index a6c8b10f5ca..4967f58528e 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -245,7 +245,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
threads << Thread.new do
Sidekiq::Worker.skipping_transaction_check do
- @pipeline = Ci::CreatePipelineService.new(project, user, build_push_data).execute(:push)
+ @pipeline = Ci::CreatePipelineService.new(project, user, build_push_data).execute(:push).payload
end
end
diff --git a/spec/features/merge_request/user_views_diffs_spec.rb b/spec/features/merge_request/user_views_diffs_spec.rb
index 09dfe41a718..208ed1f01e7 100644
--- a/spec/features/merge_request/user_views_diffs_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe 'User views diffs', :js do
expect(page).not_to have_selector('.mr-loading-status .loading', visible: true)
end
- it 'expands all diffs' do
+ it 'expands all diffs', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/333628' do
first('.diff-toggle-caret').click
expect(page).to have_button('Expand all')
diff --git a/spec/features/merge_requests/user_lists_merge_requests_spec.rb b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
index ab6242784fe..f96717970bf 100644
--- a/spec/features/merge_requests/user_lists_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'Merge requests > User lists merge requests' do
milestone: create(:milestone, project: project, due_date: '2013-12-11'),
created_at: 1.minute.ago,
updated_at: 1.minute.ago)
- @fix.metrics.update_column(:merged_at, 10.seconds.ago)
+ @fix.metrics.update!(merged_at: 10.seconds.ago, latest_closed_at: 10.seconds.ago)
@markdown = create(:merge_request,
title: 'markdown',
@@ -34,7 +34,7 @@ RSpec.describe 'Merge requests > User lists merge requests' do
milestone: create(:milestone, project: project, due_date: '2013-12-12'),
created_at: 2.minutes.ago,
updated_at: 2.minutes.ago)
- @markdown.metrics.update_column(:merged_at, 50.seconds.ago)
+ @markdown.metrics.update!(merged_at: 10.minutes.ago, latest_closed_at: 10.seconds.ago)
@merge_test = create(:merge_request,
title: 'merge-test',
@@ -42,7 +42,15 @@ RSpec.describe 'Merge requests > User lists merge requests' do
source_branch: 'merge-test',
created_at: 3.minutes.ago,
updated_at: 10.seconds.ago)
- @merge_test.metrics.update_column(:merged_at, 10.seconds.ago)
+ @merge_test.metrics.update!(merged_at: 10.seconds.ago, latest_closed_at: 10.seconds.ago)
+
+ @feature = create(:merge_request,
+ title: 'feature',
+ source_project: project,
+ source_branch: 'feautre',
+ created_at: 2.minutes.ago,
+ updated_at: 1.minute.ago)
+ @feature.metrics.update!(merged_at: 10.seconds.ago, latest_closed_at: 10.minutes.ago)
end
context 'merge request reviewers' do
@@ -71,9 +79,10 @@ RSpec.describe 'Merge requests > User lists merge requests' do
expect(current_path).to eq(project_merge_requests_path(project))
expect(page).to have_content 'merge-test'
+ expect(page).to have_content 'feature'
expect(page).not_to have_content 'fix'
expect(page).not_to have_content 'markdown'
- expect(count_merge_requests).to eq(1)
+ expect(count_merge_requests).to eq(2)
end
it 'filters on a specific assignee' do
@@ -90,28 +99,35 @@ RSpec.describe 'Merge requests > User lists merge requests' do
expect(first_merge_request).to include('fix')
expect(last_merge_request).to include('merge-test')
- expect(count_merge_requests).to eq(3)
+ expect(count_merge_requests).to eq(4)
end
it 'sorts by last updated' do
visit_merge_requests(project, sort: sort_value_recently_updated)
expect(first_merge_request).to include('merge-test')
- expect(count_merge_requests).to eq(3)
+ expect(count_merge_requests).to eq(4)
end
it 'sorts by milestone' do
visit_merge_requests(project, sort: sort_value_milestone)
expect(first_merge_request).to include('fix')
- expect(count_merge_requests).to eq(3)
+ expect(count_merge_requests).to eq(4)
end
it 'sorts by merged at' do
visit_merge_requests(project, sort: sort_value_merged_date)
expect(first_merge_request).to include('markdown')
- expect(count_merge_requests).to eq(3)
+ expect(count_merge_requests).to eq(4)
+ end
+
+ it 'sorts by closed at' do
+ visit_merge_requests(project, sort: sort_value_closed_date)
+
+ expect(first_merge_request).to include('feature')
+ expect(count_merge_requests).to eq(4)
end
it 'filters on one label and sorts by due date' do
diff --git a/spec/features/nav/top_nav_responsive_spec.rb b/spec/features/nav/top_nav_responsive_spec.rb
index dfe3e76f172..5c6a12a37a3 100644
--- a/spec/features/nav/top_nav_responsive_spec.rb
+++ b/spec/features/nav/top_nav_responsive_spec.rb
@@ -8,8 +8,6 @@ RSpec.describe 'top nav responsive', :js do
let_it_be(:user) { create(:user) }
before do
- stub_feature_flags(combined_menu: true)
-
sign_in(user)
visit explore_projects_path
diff --git a/spec/features/profile_spec.rb b/spec/features/profile_spec.rb
index 0f453f1c1e5..9a261c6d9c8 100644
--- a/spec/features/profile_spec.rb
+++ b/spec/features/profile_spec.rb
@@ -74,8 +74,6 @@ RSpec.describe 'Profile account page', :js do
expect(find('#feed_token').value).not_to eq(previous_token)
end
-
- expect(page).to have_content 'Feed token was successfully reset'
end
end
diff --git a/spec/features/profiles/user_manages_applications_spec.rb b/spec/features/profiles/user_manages_applications_spec.rb
index 22eed748c00..c76ef2613fd 100644
--- a/spec/features/profiles/user_manages_applications_spec.rb
+++ b/spec/features/profiles/user_manages_applications_spec.rb
@@ -3,55 +3,12 @@
require 'spec_helper'
RSpec.describe 'User manages applications' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:new_application_path) { applications_profile_path }
before do
sign_in(user)
- visit applications_profile_path
end
- it 'manages applications' do
- expect(page).to have_content 'Add new application'
-
- fill_in :doorkeeper_application_name, with: 'test'
- fill_in :doorkeeper_application_redirect_uri, with: 'https://test.com'
- check :doorkeeper_application_scopes_read_user
- click_on 'Save application'
-
- expect(page).to have_content 'Application: test'
- expect(page).to have_content 'Application ID'
- expect(page).to have_content 'Secret'
- expect(page).to have_content 'Confidential Yes'
-
- click_on 'Edit'
-
- expect(page).to have_content 'Edit application'
- fill_in :doorkeeper_application_name, with: 'test_changed'
- uncheck :doorkeeper_application_confidential
- click_on 'Save application'
-
- expect(page).to have_content 'test_changed'
- expect(page).to have_content 'Application ID'
- expect(page).to have_content 'Secret'
- expect(page).to have_content 'Confidential No'
-
- visit applications_profile_path
-
- page.within '.oauth-applications' do
- click_on 'Destroy'
- end
- expect(page.find('.oauth-applications')).not_to have_content 'test_changed'
- end
-
- context 'when scopes are blank' do
- it 'returns an error' do
- expect(page).to have_content 'Add new application'
-
- fill_in :doorkeeper_application_name, with: 'test'
- fill_in :doorkeeper_application_redirect_uri, with: 'https://test.com'
- click_on 'Save application'
-
- expect(page).to have_content("Scopes can't be blank")
- end
- end
+ include_examples 'manage applications'
end
diff --git a/spec/features/project_variables_spec.rb b/spec/features/project_variables_spec.rb
index 62565eaabe1..5139c724d82 100644
--- a/spec/features/project_variables_spec.rb
+++ b/spec/features/project_variables_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'Project variables', :js do
click_button('Add variable')
page.within('#add-ci-variable') do
- find('[data-qa-selector="ci_variable_key_field"] input').set('akey')
+ find('[data-qa-selector="ci_variable_key_field"] input').set('akey') # rubocop:disable QA/SelectorUsage
find('#ci-variable-value').set('akey_value')
find('[data-testid="environment-scope"]').click
find('[data-testid="ci-environment-search"]').set('review/*')
diff --git a/spec/features/projects/activity/user_sees_design_activity_spec.rb b/spec/features/projects/activity/user_sees_design_activity_spec.rb
index 389e86299e5..70153921b82 100644
--- a/spec/features/projects/activity/user_sees_design_activity_spec.rb
+++ b/spec/features/projects/activity/user_sees_design_activity_spec.rb
@@ -34,26 +34,26 @@ RSpec.describe 'Projects > Activity > User sees design Activity', :js do
visit activity_project_path(project)
expect(page).to have_content('joined project')
- expect(page).to have_content(design_activity(uploader, 'uploaded'))
- expect(page).to have_content(design_activity(editor, 'revised'))
- expect(page).to have_content(design_activity(deleter, 'deleted'))
+ expect(page).to have_content(design_activity(uploader, 'added'))
+ expect(page).to have_content(design_activity(editor, 'updated'))
+ expect(page).to have_content(design_activity(deleter, 'removed'))
end
it 'allows filtering out the design events', :aggregate_failures do
visit activity_project_path(project, event_filter: EventFilter::ISSUE)
- expect(page).not_to have_content(design_activity(uploader, 'uploaded'))
- expect(page).not_to have_content(design_activity(editor, 'revised'))
- expect(page).not_to have_content(design_activity(deleter, 'deleted'))
+ expect(page).not_to have_content(design_activity(uploader, 'added'))
+ expect(page).not_to have_content(design_activity(editor, 'updated'))
+ expect(page).not_to have_content(design_activity(deleter, 'removed'))
end
it 'allows filtering in the design events', :aggregate_failures do
visit activity_project_path(project, event_filter: EventFilter::DESIGNS)
expect(page).not_to have_content('joined project')
- expect(page).to have_content(design_activity(uploader, 'uploaded'))
- expect(page).to have_content(design_activity(editor, 'revised'))
- expect(page).to have_content(design_activity(deleter, 'deleted'))
+ expect(page).to have_content(design_activity(uploader, 'added'))
+ expect(page).to have_content(design_activity(editor, 'updated'))
+ expect(page).to have_content(design_activity(deleter, 'removed'))
end
end
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 595304789a6..8281e82959b 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -13,6 +13,22 @@ RSpec.describe 'File blob', :js do
wait_for_requests
end
+ def create_file(file_name, content)
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add #{file_name}",
+ file_path: file_name,
+ file_content: <<-SPEC.strip_heredoc
+ #{content}
+ SPEC
+ ).execute
+ end
+
context 'Ruby file' do
before do
visit_blob('files/ruby/popen.rb')
@@ -121,7 +137,7 @@ RSpec.describe 'File blob', :js do
context 'when ref switch' do
def switch_ref_to(ref_name)
- first('.qa-branches-select').click
+ first('.qa-branches-select').click # rubocop:disable QA/SelectorUsage
page.within '.project-refs-form' do
click_link ref_name
@@ -584,94 +600,483 @@ RSpec.describe 'File blob', :js do
end
end
- describe '.gitlab-ci.yml' do
+ context 'files with auxiliary viewers' do
before do
- project.add_maintainer(project.creator)
+ stub_feature_flags(refactor_blob_viewer: true)
+ end
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab-ci.yml",
- file_path: '.gitlab-ci.yml',
- file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
- ).execute
+ describe '.gitlab-ci.yml' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab-ci.yml",
+ file_path: '.gitlab-ci.yml',
+ file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+ ).execute
+
+ visit_blob('.gitlab-ci.yml')
+ end
- visit_blob('.gitlab-ci.yml')
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that configuration is valid
+ expect(page).to have_content('This GitLab CI configuration is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that configuration is valid
- expect(page).to have_content('This GitLab CI configuration is valid.')
+ describe '.gitlab/route-map.yml' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab/route-map.yml",
+ file_path: '.gitlab/route-map.yml',
+ file_content: <<-MAP.strip_heredoc
+ # Team data
+ - source: 'data/team.yml'
+ public: 'team/'
+ MAP
+ ).execute
+
+ visit_blob('.gitlab/route-map.yml')
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that map is valid
+ expect(page).to have_content('This Route Map is valid.')
- # shows a learn more link
- expect(page).to have_link('Learn more')
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
end
end
- end
- describe '.gitlab/route-map.yml' do
- before do
- project.add_maintainer(project.creator)
+ describe '.gitlab/dashboards/custom-dashboard.yml' do
+ before do
+ project.add_maintainer(project.creator)
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab/route-map.yml",
- file_path: '.gitlab/route-map.yml',
- file_content: <<-MAP.strip_heredoc
- # Team data
- - source: 'data/team.yml'
- public: 'team/'
- MAP
- ).execute
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab/dashboards/custom-dashboard.yml",
+ file_path: '.gitlab/dashboards/custom-dashboard.yml',
+ file_content: file_content
+ ).execute
+ end
+
+ context 'with metrics_dashboard_exhaustive_validations feature flag off' do
+ before do
+ stub_feature_flags(metrics_dashboard_exhaustive_validations: false)
+ visit_blob('.gitlab/dashboards/custom-dashboard.yml')
+ end
+
+ context 'valid dashboard file' do
+ let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is valid
+ expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+
+ context 'invalid dashboard file' do
+ let(:file_content) { "dashboard: 'invalid'" }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is invalid
+ expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
+ expect(page).to have_content("panel_groups: should be an array of panel_groups objects")
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+ end
+
+ context 'with metrics_dashboard_exhaustive_validations feature flag on' do
+ before do
+ stub_feature_flags(metrics_dashboard_exhaustive_validations: true)
+ visit_blob('.gitlab/dashboards/custom-dashboard.yml')
+ end
+
+ context 'valid dashboard file' do
+ let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
- visit_blob('.gitlab/route-map.yml')
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is valid
+ expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+
+ context 'invalid dashboard file' do
+ let(:file_content) { "dashboard: 'invalid'" }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is invalid
+ expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
+ expect(page).to have_content("root is missing required keys: panel_groups")
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+ end
end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that map is valid
- expect(page).to have_content('This Route Map is valid.')
+ context 'LICENSE' do
+ before do
+ visit_blob('LICENSE')
+ end
- # shows a learn more link
- expect(page).to have_link('Learn more')
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows license
+ expect(page).to have_content('This project is licensed under the MIT License.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
+ end
end
end
- end
- describe '.gitlab/dashboards/custom-dashboard.yml' do
- before do
- project.add_maintainer(project.creator)
+ context '*.gemspec' do
+ before do
+ project.add_maintainer(project.creator)
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab/dashboards/custom-dashboard.yml",
- file_path: '.gitlab/dashboards/custom-dashboard.yml',
- file_content: file_content
- ).execute
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add activerecord.gemspec",
+ file_path: 'activerecord.gemspec',
+ file_content: <<-SPEC.strip_heredoc
+ Gem::Specification.new do |s|
+ s.platform = Gem::Platform::RUBY
+ s.name = "activerecord"
+ end
+ SPEC
+ ).execute
+
+ visit_blob('activerecord.gemspec')
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows names of dependency manager and package
+ expect(page).to have_content('This project manages its dependencies using RubyGems.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more', href: 'https://rubygems.org/')
+ end
+ end
end
- context 'with metrics_dashboard_exhaustive_validations feature flag off' do
+ context 'CONTRIBUTING.md' do
before do
- stub_feature_flags(metrics_dashboard_exhaustive_validations: false)
- visit_blob('.gitlab/dashboards/custom-dashboard.yml')
+ file_name = 'CONTRIBUTING.md'
+
+ create_file(file_name, '## Contribution guidelines')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("After you've reviewed these contribution guidelines, you'll be all set to contribute to this project.")
+ end
+ end
+ end
+
+ context 'CHANGELOG.md' do
+ before do
+ file_name = 'CHANGELOG.md'
+
+ create_file(file_name, '## Changelog for v1.0.0')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("To find the state of this project's repository at the time of any of these versions, check out the tags.")
+ end
+ end
+ end
+
+ context 'Cargo.toml' do
+ before do
+ file_name = 'Cargo.toml'
+
+ create_file(file_name, '
+ [package]
+ name = "hello_world" # the name of the package
+ version = "0.1.0" # the current version, obeying semver
+ authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Cargo.")
+ end
+ end
+ end
+
+ context 'Cartfile' do
+ before do
+ file_name = 'Cartfile'
+
+ create_file(file_name, '
+ gitlab "Alamofire/Alamofire" == 4.9.0
+ gitlab "Alamofire/AlamofireImage" ~> 3.4
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Carthage.")
+ end
+ end
+ end
+
+ context 'composer.json' do
+ before do
+ file_name = 'composer.json'
+
+ create_file(file_name, '
+ {
+ "license": "MIT"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Composer.")
+ end
end
+ end
+
+ context 'Gemfile' do
+ before do
+ file_name = 'Gemfile'
+
+ create_file(file_name, '
+ source "https://rubygems.org"
- context 'valid dashboard file' do
- let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
+ # Gems here
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Bundler.")
+ end
+ end
+ end
+
+ context 'Godeps.json' do
+ before do
+ file_name = 'Godeps.json'
+
+ create_file(file_name, '
+ {
+ "GoVersion": "go1.6"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using godep.")
+ end
+ end
+ end
+
+ context 'go.mod' do
+ before do
+ file_name = 'go.mod'
+
+ create_file(file_name, '
+ module example.com/mymodule
+
+ go 1.14
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Go Modules.")
+ end
+ end
+ end
+
+ context 'package.json' do
+ before do
+ file_name = 'package.json'
+
+ create_file(file_name, '
+ {
+ "name": "my-awesome-package",
+ "version": "1.0.0"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using npm.")
+ end
+ end
+ end
+
+ context 'podfile' do
+ before do
+ file_name = 'podfile'
+
+ create_file(file_name, 'platform :ios, "8.0"')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
+ end
+ end
+ end
+
+ context 'test.podspec' do
+ before do
+ file_name = 'test.podspec'
+
+ create_file(file_name, '
+ Pod::Spec.new do |s|
+ s.name = "TensorFlowLiteC"
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
+ end
+ end
+ end
+
+ context 'JSON.podspec.json' do
+ before do
+ file_name = 'JSON.podspec.json'
+
+ create_file(file_name, '
+ {
+ "name": "JSON"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
+ end
+ end
+ end
+
+ context 'requirements.txt' do
+ before do
+ file_name = 'requirements.txt'
+
+ create_file(file_name, 'Project requirements')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using pip.")
+ end
+ end
+ end
+
+ context 'yarn.lock' do
+ before do
+ file_name = 'yarn.lock'
+
+ create_file(file_name, '
+ # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+ # yarn lockfile v1
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Yarn.")
+ end
+ end
+ end
+
+ context 'when refactor_blob_viewer is disabled' do
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ end
+
+ describe '.gitlab-ci.yml' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab-ci.yml",
+ file_path: '.gitlab-ci.yml',
+ file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+ ).execute
+
+ visit_blob('.gitlab-ci.yml')
+ end
it 'displays an auxiliary viewer' do
aggregate_failures do
- # shows that dashboard yaml is valid
- expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
+ # shows that configuration is valid
+ expect(page).to have_content('This GitLab CI configuration is valid.')
# shows a learn more link
expect(page).to have_link('Learn more')
@@ -679,104 +1084,422 @@ RSpec.describe 'File blob', :js do
end
end
- context 'invalid dashboard file' do
- let(:file_content) { "dashboard: 'invalid'" }
+ describe '.gitlab/route-map.yml' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab/route-map.yml",
+ file_path: '.gitlab/route-map.yml',
+ file_content: <<-MAP.strip_heredoc
+ # Team data
+ - source: 'data/team.yml'
+ public: 'team/'
+ MAP
+ ).execute
+
+ visit_blob('.gitlab/route-map.yml')
+ end
it 'displays an auxiliary viewer' do
aggregate_failures do
- # shows that dashboard yaml is invalid
- expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
- expect(page).to have_content("panel_groups: should be an array of panel_groups objects")
+ # shows that map is valid
+ expect(page).to have_content('This Route Map is valid.')
# shows a learn more link
expect(page).to have_link('Learn more')
end
end
end
- end
- context 'with metrics_dashboard_exhaustive_validations feature flag on' do
- before do
- stub_feature_flags(metrics_dashboard_exhaustive_validations: true)
- visit_blob('.gitlab/dashboards/custom-dashboard.yml')
+ describe '.gitlab/dashboards/custom-dashboard.yml' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab/dashboards/custom-dashboard.yml",
+ file_path: '.gitlab/dashboards/custom-dashboard.yml',
+ file_content: file_content
+ ).execute
+ end
+
+ context 'with metrics_dashboard_exhaustive_validations feature flag off' do
+ before do
+ stub_feature_flags(metrics_dashboard_exhaustive_validations: false)
+ visit_blob('.gitlab/dashboards/custom-dashboard.yml')
+ end
+
+ context 'valid dashboard file' do
+ let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is valid
+ expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+
+ context 'invalid dashboard file' do
+ let(:file_content) { "dashboard: 'invalid'" }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is invalid
+ expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
+ expect(page).to have_content("panel_groups: should be an array of panel_groups objects")
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+ end
+
+ context 'with metrics_dashboard_exhaustive_validations feature flag on' do
+ before do
+ stub_feature_flags(metrics_dashboard_exhaustive_validations: true)
+ visit_blob('.gitlab/dashboards/custom-dashboard.yml')
+ end
+
+ context 'valid dashboard file' do
+ let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is valid
+ expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+
+ context 'invalid dashboard file' do
+ let(:file_content) { "dashboard: 'invalid'" }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is invalid
+ expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
+ expect(page).to have_content("root is missing required keys: panel_groups")
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+ end
end
- context 'valid dashboard file' do
- let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
+ context 'LICENSE' do
+ before do
+ visit_blob('LICENSE')
+ end
it 'displays an auxiliary viewer' do
aggregate_failures do
- # shows that dashboard yaml is valid
- expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
+ # shows license
+ expect(page).to have_content('This project is licensed under the MIT License.')
# shows a learn more link
- expect(page).to have_link('Learn more')
+ expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
end
end
end
- context 'invalid dashboard file' do
- let(:file_content) { "dashboard: 'invalid'" }
+ context '*.gemspec' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add activerecord.gemspec",
+ file_path: 'activerecord.gemspec',
+ file_content: <<-SPEC.strip_heredoc
+ Gem::Specification.new do |s|
+ s.platform = Gem::Platform::RUBY
+ s.name = "activerecord"
+ end
+ SPEC
+ ).execute
+
+ visit_blob('activerecord.gemspec')
+ end
it 'displays an auxiliary viewer' do
aggregate_failures do
- # shows that dashboard yaml is invalid
- expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
- expect(page).to have_content("root is missing required keys: panel_groups")
+ # shows names of dependency manager and package
+ expect(page).to have_content('This project manages its dependencies using RubyGems.')
# shows a learn more link
- expect(page).to have_link('Learn more')
+ expect(page).to have_link('Learn more', href: 'https://rubygems.org/')
end
end
end
- end
- end
- context 'LICENSE' do
- before do
- visit_blob('LICENSE')
- end
+ context 'CONTRIBUTING.md' do
+ before do
+ file_name = 'CONTRIBUTING.md'
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows license
- expect(page).to have_content('This project is licensed under the MIT License.')
+ create_file(file_name, '## Contribution guidelines')
+ visit_blob(file_name)
+ end
- # shows a learn more link
- expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("After you've reviewed these contribution guidelines, you'll be all set to contribute to this project.")
+ end
+ end
end
- end
- end
- context '*.gemspec' do
- before do
- project.add_maintainer(project.creator)
+ context 'CHANGELOG.md' do
+ before do
+ file_name = 'CHANGELOG.md'
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add activerecord.gemspec",
- file_path: 'activerecord.gemspec',
- file_content: <<-SPEC.strip_heredoc
- Gem::Specification.new do |s|
- s.platform = Gem::Platform::RUBY
- s.name = "activerecord"
- end
- SPEC
- ).execute
+ create_file(file_name, '## Changelog for v1.0.0')
+ visit_blob(file_name)
+ end
- visit_blob('activerecord.gemspec')
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("To find the state of this project's repository at the time of any of these versions, check out the tags.")
+ end
+ end
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows names of dependency manager and package
- expect(page).to have_content('This project manages its dependencies using RubyGems.')
+ context 'Cargo.toml' do
+ before do
+ file_name = 'Cargo.toml'
+
+ create_file(file_name, '
+ [package]
+ name = "hello_world" # the name of the package
+ version = "0.1.0" # the current version, obeying semver
+ authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Cargo.")
+ end
+ end
+ end
+
+ context 'Cartfile' do
+ before do
+ file_name = 'Cartfile'
+
+ create_file(file_name, '
+ gitlab "Alamofire/Alamofire" == 4.9.0
+ gitlab "Alamofire/AlamofireImage" ~> 3.4
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Carthage.")
+ end
+ end
+ end
+
+ context 'composer.json' do
+ before do
+ file_name = 'composer.json'
+
+ create_file(file_name, '
+ {
+ "license": "MIT"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Composer.")
+ end
+ end
+ end
+
+ context 'Gemfile' do
+ before do
+ file_name = 'Gemfile'
+
+ create_file(file_name, '
+ source "https://rubygems.org"
+
+ # Gems here
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Bundler.")
+ end
+ end
+ end
+
+ context 'Godeps.json' do
+ before do
+ file_name = 'Godeps.json'
+
+ create_file(file_name, '
+ {
+ "GoVersion": "go1.6"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using godep.")
+ end
+ end
+ end
+
+ context 'go.mod' do
+ before do
+ file_name = 'go.mod'
+
+ create_file(file_name, '
+ module example.com/mymodule
- # shows a learn more link
- expect(page).to have_link('Learn more', href: 'https://rubygems.org/')
+ go 1.14
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Go Modules.")
+ end
+ end
+ end
+
+ context 'package.json' do
+ before do
+ file_name = 'package.json'
+
+ create_file(file_name, '
+ {
+ "name": "my-awesome-package",
+ "version": "1.0.0"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using npm.")
+ end
+ end
+ end
+
+ context 'podfile' do
+ before do
+ file_name = 'podfile'
+
+ create_file(file_name, 'platform :ios, "8.0"')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
+ end
+ end
+ end
+
+ context 'test.podspec' do
+ before do
+ file_name = 'test.podspec'
+
+ create_file(file_name, '
+ Pod::Spec.new do |s|
+ s.name = "TensorFlowLiteC"
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
+ end
+ end
+ end
+
+ context 'JSON.podspec.json' do
+ before do
+ file_name = 'JSON.podspec.json'
+
+ create_file(file_name, '
+ {
+ "name": "JSON"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
+ end
+ end
+ end
+
+ context 'requirements.txt' do
+ before do
+ file_name = 'requirements.txt'
+
+ create_file(file_name, 'Project requirements')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using pip.")
+ end
+ end
+ end
+
+ context 'yarn.lock' do
+ before do
+ file_name = 'yarn.lock'
+
+ create_file(file_name, '
+ # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+ # yarn lockfile v1
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Yarn.")
+ end
+ end
end
end
end
diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb
index c0cc12eac66..192bccd6f6e 100644
--- a/spec/features/projects/ci/editor_spec.rb
+++ b/spec/features/projects/ci/editor_spec.rb
@@ -5,17 +5,55 @@ require 'spec_helper'
RSpec.describe 'Pipeline Editor', :js do
include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project_empty_repo, :public) }
let(:user) { create(:user) }
+ let(:default_branch) { 'main' }
+ let(:other_branch) { 'test' }
+
before do
sign_in(user)
project.add_developer(user)
+ project.repository.create_file(user, project.ci_config_path_or_default, 'Default Content', message: 'Create CI file for main', branch_name: default_branch)
+ project.repository.create_file(user, project.ci_config_path_or_default, 'Other Content', message: 'Create CI file for test', branch_name: other_branch)
+
visit project_ci_pipeline_editor_path(project)
+ wait_for_requests
end
it 'user sees the Pipeline Editor page' do
expect(page).to have_content('Pipeline Editor')
end
+
+ context 'branch switcher' do
+ before do
+ stub_feature_flags(pipeline_editor_branch_switcher: true)
+ end
+
+ def switch_to_branch(branch)
+ find('[data-testid="branch-selector"]').click
+
+ page.within '[data-testid="branch-selector"]' do
+ click_button branch
+ wait_for_requests
+ end
+ end
+
+ it 'displays current branch' do
+ page.within('[data-testid="branch-selector"]') do
+ expect(page).to have_content(default_branch)
+ expect(page).not_to have_content(other_branch)
+ end
+ end
+
+ it 'displays updated current branch after switching branches' do
+ switch_to_branch(other_branch)
+
+ page.within('[data-testid="branch-selector"]') do
+ expect(page).to have_content(other_branch)
+ expect(page).not_to have_content(default_branch)
+ end
+ end
+ end
end
diff --git a/spec/features/projects/commit/mini_pipeline_graph_spec.rb b/spec/features/projects/commit/mini_pipeline_graph_spec.rb
index 6de02556175..57b35d81bb8 100644
--- a/spec/features/projects/commit/mini_pipeline_graph_spec.rb
+++ b/spec/features/projects/commit/mini_pipeline_graph_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe 'Mini Pipeline Graph in Commit View', :js do
before do
build.run
visit project_commit_path(project, project.commit.id)
+ wait_for_requests
end
it 'display icon with status' do
@@ -26,7 +27,7 @@ RSpec.describe 'Mini Pipeline Graph in Commit View', :js do
end
it 'displays a mini pipeline graph' do
- expect(page).to have_selector('[data-testid="pipeline-mini-graph"]')
+ expect(page).to have_selector('[data-testid="commit-box-mini-graph"]')
first('.mini-pipeline-graph-dropdown-toggle').click
diff --git a/spec/features/projects/environments/environment_metrics_spec.rb b/spec/features/projects/environments/environment_metrics_spec.rb
index e8f197b67c2..0f858c627bc 100644
--- a/spec/features/projects/environments/environment_metrics_spec.rb
+++ b/spec/features/projects/environments/environment_metrics_spec.rb
@@ -27,12 +27,12 @@ RSpec.describe 'Environment > Metrics' do
shared_examples 'has environment selector' do
it 'has a working environment selector', :js do
- click_link('See metrics')
+ click_link 'Monitoring'
expect(page).to have_current_path(project_metrics_dashboard_path(project, environment: environment.id))
- expect(page).to have_css('[data-qa-selector="environments_dropdown"]')
+ expect(page).to have_css('[data-qa-selector="environments_dropdown"]') # rubocop:disable QA/SelectorUsage
- within('[data-qa-selector="environments_dropdown"]') do
+ within('[data-qa-selector="environments_dropdown"]') do # rubocop:disable QA/SelectorUsage
# Click on the dropdown
click_on(environment.name)
@@ -55,10 +55,10 @@ RSpec.describe 'Environment > Metrics' do
create(:deployment, environment: environment, deployable: build)
end
- it 'shows metrics' do
- click_link('See metrics')
+ it 'shows metrics', :js do
+ click_link 'Monitoring'
- expect(page).to have_css('div#prometheus-graphs')
+ expect(page).to have_css('[data-qa-selector="prometheus_graphs"]') # rubocop:disable QA/SelectorUsage
end
it_behaves_like 'has environment selector'
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index fea054de64e..5320f68b525 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -27,20 +27,6 @@ RSpec.describe 'Environment' do
visit_environment(environment)
end
- it 'shows environment name' do
- expect(page).to have_content(environment.name)
- end
-
- context 'without auto-stop' do
- it 'does not show auto-stop text' do
- expect(page).not_to have_content('Auto stops')
- end
-
- it 'does not show auto-stop button' do
- expect(page).not_to have_selector(auto_stop_button_selector)
- end
- end
-
context 'with auto-stop' do
let!(:environment) { create(:environment, :will_auto_stop, name: 'staging', project: project) }
@@ -48,11 +34,11 @@ RSpec.describe 'Environment' do
visit_environment(environment)
end
- it 'shows auto stop info' do
+ it 'shows auto stop info', :js do
expect(page).to have_content('Auto stops')
end
- it 'shows auto stop button' do
+ it 'shows auto stop button', :js do
expect(page).to have_selector(auto_stop_button_selector)
expect(page.find(auto_stop_button_selector).find(:xpath, '..')['action']).to have_content(cancel_auto_stop_project_environment_path(environment.project, environment))
end
@@ -80,7 +66,6 @@ RSpec.describe 'Environment' do
it 'does show deployment SHA' do
expect(page).to have_link(deployment.short_sha)
expect(page).not_to have_link('Re-deploy')
- expect(page).not_to have_terminal_button
end
end
@@ -186,7 +171,7 @@ RSpec.describe 'Environment' do
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:deployment) { create(:deployment, :success, environment: environment, deployable: build) }
- it 'does show an external link button' do
+ it 'does show an external link button', :js do
expect(page).to have_link(nil, href: environment.external_url)
end
end
@@ -200,10 +185,6 @@ RSpec.describe 'Environment' do
context 'for project maintainer' do
let(:role) { :maintainer }
- it 'shows the terminal button' do
- expect(page).to have_terminal_button
- end
-
context 'web terminal', :js do
before do
# Stub #terminals as it causes js-enabled feature specs to
@@ -224,14 +205,6 @@ RSpec.describe 'Environment' do
end
end
end
-
- context 'for developer' do
- let(:role) { :developer }
-
- it 'does not show terminal button' do
- expect(page).not_to have_terminal_button
- end
- end
end
end
@@ -259,7 +232,7 @@ RSpec.describe 'Environment' do
click_button('Stop')
click_button('Stop environment') # confirm modal
wait_for_all_requests
- expect(page).to have_content('close_app')
+ expect(page).to have_button('Delete')
end
end
@@ -269,7 +242,7 @@ RSpec.describe 'Environment' do
name: action.ref, project: project)
end
- it 'does not allow to stop environment' do
+ it 'does not allow to stop environment', :js do
expect(page).not_to have_button('Stop')
end
end
@@ -277,7 +250,7 @@ RSpec.describe 'Environment' do
context 'for reporter' do
let(:role) { :reporter }
- it 'does not show stop button' do
+ it 'does not show stop button', :js do
expect(page).not_to have_button('Stop')
end
end
@@ -287,7 +260,7 @@ RSpec.describe 'Environment' do
context 'when environment is stopped' do
let(:environment) { create(:environment, project: project, state: :stopped) }
- it 'does not show stop button' do
+ it 'does not show stop button', :js do
expect(page).not_to have_button('Stop')
end
end
@@ -323,7 +296,7 @@ RSpec.describe 'Environment' do
ref: 'feature')
end
- it 'user visits environment page' do
+ it 'user visits environment page', :js do
visit_environment(environment)
expect(page).to have_button('Stop')
@@ -380,8 +353,4 @@ RSpec.describe 'Environment' do
def visit_environment(environment)
visit project_environment_path(environment.project, environment)
end
-
- def have_terminal_button
- have_link(nil, href: terminal_project_environment_path(project, environment))
- end
end
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 0dd4bd55d46..9413fae02e0 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -455,10 +455,10 @@ RSpec.describe 'Environments page', :js do
expect(page).to have_content 'review-1'
expect(page).to have_content 'review-2'
within('.ci-table') do
- within('[data-qa-selector="environment_item"]', text: 'review-1') do
+ within('[data-qa-selector="environment_item"]', text: 'review-1') do # rubocop:disable QA/SelectorUsage
expect(find('.js-auto-stop').text).not_to be_empty
end
- within('[data-qa-selector="environment_item"]', text: 'review-2') do
+ within('[data-qa-selector="environment_item"]', text: 'review-2') do # rubocop:disable QA/SelectorUsage
expect(find('.js-auto-stop').text).not_to be_empty
end
end
diff --git a/spec/features/projects/environments_pod_logs_spec.rb b/spec/features/projects/environments_pod_logs_spec.rb
index 5019e45593c..7d31de2b418 100644
--- a/spec/features/projects/environments_pod_logs_spec.rb
+++ b/spec/features/projects/environments_pod_logs_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe 'Environment > Pod Logs', :js, :kubeclient do
wait_for_requests
- page.within('.qa-pods-dropdown') do
+ page.within('.qa-pods-dropdown') do # rubocop:disable QA/SelectorUsage
find(".dropdown-toggle:not([disabled])").click
dropdown_items = find(".dropdown-menu").all(".dropdown-item:not([disabled])")
diff --git a/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb b/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
index 30bfcb645f4..221f07a2f75 100644
--- a/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
+++ b/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe 'User sees feature flag list', :js do
it 'shows the empty page' do
expect(page).to have_text 'Get started with feature flags'
expect(page).to have_selector('.btn-confirm', text: 'New feature flag')
- expect(page).to have_selector('[data-qa-selector="configure_feature_flags_button"]', text: 'Configure')
+ expect(page).to have_selector('[data-qa-selector="configure_feature_flags_button"]', text: 'Configure') # rubocop:disable QA/SelectorUsage
end
end
end
diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
index 42f8daf9d5e..37583870cfd 100644
--- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
+++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'Projects > Files > Project owner sees a link to create a license
expect(current_path).to eq("/-/ide/project/#{project.full_path}/edit/master/-/LICENSE")
- expect(page).to have_selector('.qa-file-templates-bar')
+ expect(page).to have_selector('.qa-file-templates-bar') # rubocop:disable QA/SelectorUsage
select_template('MIT License')
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index 9a6d1961a02..69e4303cce7 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -181,8 +181,8 @@ RSpec.describe 'Project fork' do
it 'allows user to fork only to the group on fork page', :js do
visit new_project_fork_path(project)
- to_personal_namespace = find('[data-qa-selector=fork_namespace_button].disabled')
- to_group = find(".fork-groups button[data-qa-name=#{group.name}]")
+ to_personal_namespace = find('[data-qa-selector=fork_namespace_button].disabled') # rubocop:disable QA/SelectorUsage
+ to_group = find(".fork-groups button[data-qa-name=#{group.name}]") # rubocop:disable QA/SelectorUsage
expect(to_personal_namespace).not_to be_nil
expect(to_group).not_to be_disabled
diff --git a/spec/features/projects/import_export/export_file_spec.rb b/spec/features/projects/import_export/export_file_spec.rb
index 7f8ded4fa43..ccf3ccc6a96 100644
--- a/spec/features/projects/import_export/export_file_spec.rb
+++ b/spec/features/projects/import_export/export_file_spec.rb
@@ -82,8 +82,7 @@ RSpec.describe 'Import/Export - project export integration test', :js do
relations << Gitlab::Json.parse(IO.read(project_json_path))
Dir.glob(File.join(tmpdir, 'tree/project', '*.ndjson')) do |rb_filename|
File.foreach(rb_filename) do |line|
- json = ActiveSupport::JSON.decode(line)
- relations << json
+ relations << Gitlab::Json.parse(line)
end
end
diff --git a/spec/features/projects/import_export/import_file_spec.rb b/spec/features/projects/import_export/import_file_spec.rb
index a4c57e83bdd..302187917b7 100644
--- a/spec/features/projects/import_export/import_file_spec.rb
+++ b/spec/features/projects/import_export/import_file_spec.rb
@@ -62,6 +62,6 @@ RSpec.describe 'Import/Export - project import integration test', :js do
end
def click_import_project
- find('[data-qa-panel-name="import_project"]').click
+ find('[data-qa-panel-name="import_project"]').click # rubocop:disable QA/SelectorUsage
end
end
diff --git a/spec/features/projects/infrastructure_registry_spec.rb b/spec/features/projects/infrastructure_registry_spec.rb
index c3cb3955092..16dd96e6c02 100644
--- a/spec/features/projects/infrastructure_registry_spec.rb
+++ b/spec/features/projects/infrastructure_registry_spec.rb
@@ -11,9 +11,9 @@ RSpec.describe 'Infrastructure Registry' do
project.add_maintainer(user)
end
- context 'when feature is not available' do
+ context 'when packages registry is not enabled' do
before do
- stub_feature_flags(infrastructure_registry_page: false)
+ stub_config(packages: { enabled: false })
end
it 'gives 404' do
@@ -23,7 +23,7 @@ RSpec.describe 'Infrastructure Registry' do
end
end
- context 'when feature is available', :js do
+ context 'when packages registry is enabled', :js do
before do
visit_project_infrastructure_registry
end
diff --git a/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb b/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb
new file mode 100644
index 00000000000..f46cade9d5f
--- /dev/null
+++ b/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User uses inherited settings', :js do
+ include JiraServiceHelper
+
+ include_context 'project service activation'
+
+ before do
+ stub_jira_integration_test
+ end
+
+ shared_examples 'inherited settings' do
+ let_it_be(:project_settings) { { url: 'http://project.com', password: 'project' } }
+
+ describe 'switching from inherited to custom settings' do
+ let_it_be(:integration) { create(:jira_integration, project: project, inherit_from_id: parent_integration.id, **project_settings) }
+
+ it 'clears the form fields and saves the entered values' do
+ visit_project_integration('Jira')
+
+ expect(page).not_to have_button('Use custom settings')
+ expect(page).to have_field('Web URL', with: parent_settings[:url], readonly: true)
+ expect(page).to have_field('Enter new password or API token', with: '', readonly: true)
+
+ click_on 'Use default settings'
+ click_on 'Use custom settings'
+
+ expect(page).not_to have_button('Use default settings')
+ expect(page).to have_field('Web URL', with: project_settings[:url], readonly: false)
+ expect(page).to have_field('Enter new password or API token', with: '', readonly: false)
+
+ fill_in 'Web URL', with: 'http://custom.com'
+ fill_in 'Enter new password or API token', with: 'custom'
+
+ click_save_integration
+
+ expect(page).to have_text('Jira settings saved and active.')
+ expect(integration.reload).to have_attributes(
+ inherit_from_id: nil,
+ url: 'http://custom.com',
+ password: 'custom'
+ )
+ end
+ end
+
+ describe 'switching from custom to inherited settings' do
+ let_it_be(:integration) { create(:jira_integration, project: project, **project_settings) }
+
+ it 'resets the form fields, makes them read-only, and saves the inherited values' do
+ visit_project_integration('Jira')
+
+ expect(page).not_to have_button('Use default settings')
+ expect(page).to have_field('URL', with: project_settings[:url], readonly: false)
+ expect(page).to have_field('Enter new password or API token', with: '', readonly: false)
+
+ click_on 'Use custom settings'
+ click_on 'Use default settings'
+
+ expect(page).not_to have_button('Use custom settings')
+ expect(page).to have_field('URL', with: parent_settings[:url], readonly: true)
+ expect(page).to have_field('Enter new password or API token', with: '', readonly: true)
+
+ click_save_integration
+
+ expect(page).to have_text('Jira settings saved and active.')
+ expect(integration.reload).to have_attributes(
+ inherit_from_id: parent_integration.id,
+ **parent_settings
+ )
+ end
+ end
+ end
+
+ context 'with instance settings' do
+ let_it_be(:parent_settings) { { url: 'http://instance.com', password: 'instance' } }
+ let_it_be(:parent_integration) { create(:jira_integration, :instance, **parent_settings) }
+
+ it_behaves_like 'inherited settings'
+ end
+
+ context 'with group settings' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:parent_settings) { { url: 'http://group.com', password: 'group' } }
+ let_it_be(:parent_integration) { create(:jira_integration, group: group, project: nil, **parent_settings) }
+
+ it_behaves_like 'inherited settings'
+ end
+end
diff --git a/spec/features/projects/members/invite_group_spec.rb b/spec/features/projects/members/invite_group_spec.rb
index 6ce6834b5d5..8c3646125a5 100644
--- a/spec/features/projects/members/invite_group_spec.rb
+++ b/spec/features/projects/members/invite_group_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Project > Members > Invite group', :js do
using RSpec::Parameterized::TableSyntax
where(:invite_members_group_modal_enabled, :expected_invite_group_selector) do
- true | 'button[data-qa-selector="invite_a_group_button"]'
+ true | 'button[data-qa-selector="invite_a_group_button"]' # rubocop:disable QA/SelectorUsage
false | '#invite-group-tab'
end
@@ -43,7 +43,7 @@ RSpec.describe 'Project > Members > Invite group', :js do
end
describe 'Share with group lock' do
- let(:invite_group_selector) { 'button[data-qa-selector="invite_a_group_button"]' }
+ let(:invite_group_selector) { 'button[data-qa-selector="invite_a_group_button"]' } # rubocop:disable QA/SelectorUsage
shared_examples 'the project can be shared with groups' do
it 'the "Invite a group" button exists' do
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index ef28979798f..0b293970703 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -6,374 +6,352 @@ RSpec.describe 'New project', :js do
include Select2Helper
include Spec::Support::Helpers::Features::TopNavSpecHelpers
- shared_examples 'combined_menu: feature flag examples' do
- context 'as a user' do
- let(:user) { create(:user) }
+ context 'as a user' do
+ let(:user) { create(:user) }
- before do
- sign_in(user)
- end
+ before do
+ sign_in(user)
+ end
- it 'shows a message if multiple levels are restricted' do
- Gitlab::CurrentSettings.update!(
- restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::INTERNAL]
- )
+ it 'shows a message if multiple levels are restricted' do
+ Gitlab::CurrentSettings.update!(
+ restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::INTERNAL]
+ )
- visit new_project_path
- find('[data-qa-panel-name="blank_project"]').click
+ visit new_project_path
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
- expect(page).to have_content 'Other visibility settings have been disabled by the administrator.'
- end
+ expect(page).to have_content 'Other visibility settings have been disabled by the administrator.'
+ end
- it 'shows a message if all levels are restricted' do
- Gitlab::CurrentSettings.update!(
- restricted_visibility_levels: Gitlab::VisibilityLevel.values
- )
+ it 'shows a message if all levels are restricted' do
+ Gitlab::CurrentSettings.update!(
+ restricted_visibility_levels: Gitlab::VisibilityLevel.values
+ )
- visit new_project_path
- find('[data-qa-panel-name="blank_project"]').click
+ visit new_project_path
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
- expect(page).to have_content 'Visibility settings have been disabled by the administrator.'
- end
+ expect(page).to have_content 'Visibility settings have been disabled by the administrator.'
end
+ end
- context 'as an admin' do
- let(:user) { create(:admin) }
+ context 'as an admin' do
+ let(:user) { create(:admin) }
- before do
- sign_in(user)
- end
+ before do
+ sign_in(user)
+ end
- it 'shows "New project" page', :js do
- visit new_project_path
- find('[data-qa-panel-name="blank_project"]').click
+ it 'shows "New project" page', :js do
+ visit new_project_path
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
- expect(page).to have_content('Project name')
- expect(page).to have_content('Project URL')
- expect(page).to have_content('Project slug')
+ expect(page).to have_content('Project name')
+ expect(page).to have_content('Project URL')
+ expect(page).to have_content('Project slug')
- click_link('New project')
- find('[data-qa-panel-name="import_project"]').click
+ click_link('New project')
+ find('[data-qa-panel-name="import_project"]').click # rubocop:disable QA/SelectorUsage
- expect(page).to have_link('GitHub')
- expect(page).to have_link('Bitbucket')
- expect(page).to have_link('GitLab.com')
- expect(page).to have_button('Repo by URL')
- expect(page).to have_link('GitLab export')
- end
+ expect(page).to have_link('GitHub')
+ expect(page).to have_link('Bitbucket')
+ expect(page).to have_link('GitLab.com')
+ expect(page).to have_button('Repo by URL')
+ expect(page).to have_link('GitLab export')
+ end
- describe 'manifest import option' do
- before do
- visit new_project_path
+ describe 'manifest import option' do
+ before do
+ visit new_project_path
- find('[data-qa-panel-name="import_project"]').click
- end
+ find('[data-qa-panel-name="import_project"]').click # rubocop:disable QA/SelectorUsage
+ end
- it 'has Manifest file' do
- expect(page).to have_link('Manifest file')
- end
+ it 'has Manifest file' do
+ expect(page).to have_link('Manifest file')
end
+ end
- context 'Visibility level selector', :js do
- Gitlab::VisibilityLevel.options.each do |key, level|
- it "sets selector to #{key}" do
- stub_application_setting(default_project_visibility: level)
+ context 'Visibility level selector', :js do
+ Gitlab::VisibilityLevel.options.each do |key, level|
+ it "sets selector to #{key}" do
+ stub_application_setting(default_project_visibility: level)
- visit new_project_path
- find('[data-qa-panel-name="blank_project"]').click
- page.within('#blank-project-pane') do
- expect(find_field("project_visibility_level_#{level}")).to be_checked
- end
+ visit new_project_path
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
+ page.within('#blank-project-pane') do
+ expect(find_field("project_visibility_level_#{level}")).to be_checked
end
+ end
- it "saves visibility level #{level} on validation error" do
- visit new_project_path
- find('[data-qa-panel-name="blank_project"]').click
+ it "saves visibility level #{level} on validation error" do
+ visit new_project_path
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
- choose(key)
- click_button('Create project')
- page.within('#blank-project-pane') do
- expect(find_field("project_visibility_level_#{level}")).to be_checked
- end
+ choose(key)
+ click_button('Create project')
+ page.within('#blank-project-pane') do
+ expect(find_field("project_visibility_level_#{level}")).to be_checked
end
end
+ end
- context 'when group visibility is private but default is internal' do
- let_it_be(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
+ context 'when group visibility is private but default is internal' do
+ let_it_be(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
- before do
- stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
- end
+ before do
+ stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
+ end
- context 'when admin mode is enabled', :enable_admin_mode do
- it 'has private selected' do
- visit new_project_path(namespace_id: group.id)
- find('[data-qa-panel-name="blank_project"]').click
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'has private selected' do
+ visit new_project_path(namespace_id: group.id)
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
- page.within('#blank-project-pane') do
- expect(find_field("project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).to be_checked
- end
+ page.within('#blank-project-pane') do
+ expect(find_field("project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).to be_checked
end
end
+ end
- context 'when admin mode is disabled' do
- it 'is not allowed' do
- visit new_project_path(namespace_id: group.id)
+ context 'when admin mode is disabled' do
+ it 'is not allowed' do
+ visit new_project_path(namespace_id: group.id)
- expect(page).to have_content('Not Found')
- end
+ expect(page).to have_content('Not Found')
end
end
+ end
- context 'when group visibility is public but user requests private' do
- let_it_be(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
+ context 'when group visibility is public but user requests private' do
+ let_it_be(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
- before do
- stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
- end
+ before do
+ stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
+ end
- context 'when admin mode is enabled', :enable_admin_mode do
- it 'has private selected' do
- visit new_project_path(namespace_id: group.id, project: { visibility_level: Gitlab::VisibilityLevel::PRIVATE })
- find('[data-qa-panel-name="blank_project"]').click
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'has private selected' do
+ visit new_project_path(namespace_id: group.id, project: { visibility_level: Gitlab::VisibilityLevel::PRIVATE })
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
- page.within('#blank-project-pane') do
- expect(find_field("project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).to be_checked
- end
+ page.within('#blank-project-pane') do
+ expect(find_field("project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).to be_checked
end
end
+ end
- context 'when admin mode is disabled' do
- it 'is not allowed' do
- visit new_project_path(namespace_id: group.id, project: { visibility_level: Gitlab::VisibilityLevel::PRIVATE })
+ context 'when admin mode is disabled' do
+ it 'is not allowed' do
+ visit new_project_path(namespace_id: group.id, project: { visibility_level: Gitlab::VisibilityLevel::PRIVATE })
- expect(page).to have_content('Not Found')
- end
+ expect(page).to have_content('Not Found')
end
end
end
+ end
- context 'Readme selector' do
- it 'shows the initialize with Readme checkbox on "Blank project" tab' do
- visit new_project_path
- find('[data-qa-panel-name="blank_project"]').click
+ context 'Readme selector' do
+ it 'shows the initialize with Readme checkbox on "Blank project" tab' do
+ visit new_project_path
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
- expect(page).to have_css('input#project_initialize_with_readme')
- expect(page).to have_content('Initialize repository with a README')
- end
+ expect(page).to have_css('input#project_initialize_with_readme')
+ expect(page).to have_content('Initialize repository with a README')
+ end
- it 'does not show the initialize with Readme checkbox on "Create from template" tab' do
- visit new_project_path
- find('[data-qa-panel-name="create_from_template"]').click
- first('.choose-template').click
+ it 'does not show the initialize with Readme checkbox on "Create from template" tab' do
+ visit new_project_path
+ find('[data-qa-panel-name="create_from_template"]').click # rubocop:disable QA/SelectorUsage
+ first('.choose-template').click
- page.within '.project-fields-form' do
- expect(page).not_to have_css('input#project_initialize_with_readme')
- expect(page).not_to have_content('Initialize repository with a README')
- end
+ page.within '.project-fields-form' do
+ expect(page).not_to have_css('input#project_initialize_with_readme')
+ expect(page).not_to have_content('Initialize repository with a README')
end
+ end
- it 'does not show the initialize with Readme checkbox on "Import project" tab' do
- visit new_project_path
- find('[data-qa-panel-name="import_project"]').click
- first('.js-import-git-toggle-button').click
+ it 'does not show the initialize with Readme checkbox on "Import project" tab' do
+ visit new_project_path
+ find('[data-qa-panel-name="import_project"]').click # rubocop:disable QA/SelectorUsage
+ first('.js-import-git-toggle-button').click
- page.within '#import-project-pane' do
- expect(page).not_to have_css('input#project_initialize_with_readme')
- expect(page).not_to have_content('Initialize repository with a README')
- end
+ page.within '#import-project-pane' do
+ expect(page).not_to have_css('input#project_initialize_with_readme')
+ expect(page).not_to have_content('Initialize repository with a README')
end
end
+ end
- context 'Namespace selector' do
- context 'with user namespace' do
- before do
- visit new_project_path
- find('[data-qa-panel-name="blank_project"]').click
- end
+ context 'Namespace selector' do
+ context 'with user namespace' do
+ before do
+ visit new_project_path
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
+ end
- it 'selects the user namespace' do
- page.within('#blank-project-pane') do
- expect(page).to have_select('project[namespace_id]', visible: false, selected: user.username)
- end
+ it 'selects the user namespace' do
+ page.within('#blank-project-pane') do
+ expect(page).to have_select('project[namespace_id]', visible: false, selected: user.username)
end
end
+ end
- context 'with group namespace' do
- let(:group) { create(:group, :private) }
+ context 'with group namespace' do
+ let(:group) { create(:group, :private) }
- before do
- group.add_owner(user)
- visit new_project_path(namespace_id: group.id)
- find('[data-qa-panel-name="blank_project"]').click
- end
+ before do
+ group.add_owner(user)
+ visit new_project_path(namespace_id: group.id)
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
+ end
- it 'selects the group namespace' do
- page.within('#blank-project-pane') do
- expect(page).to have_select('project[namespace_id]', visible: false, selected: group.name)
- end
+ it 'selects the group namespace' do
+ page.within('#blank-project-pane') do
+ expect(page).to have_select('project[namespace_id]', visible: false, selected: group.name)
end
end
+ end
- context 'with subgroup namespace' do
- let(:group) { create(:group) }
- let(:subgroup) { create(:group, parent: group) }
-
- before do
- group.add_maintainer(user)
- visit new_project_path(namespace_id: subgroup.id)
- find('[data-qa-panel-name="blank_project"]').click
- end
+ context 'with subgroup namespace' do
+ let(:group) { create(:group) }
+ let(:subgroup) { create(:group, parent: group) }
- it 'selects the group namespace' do
- page.within('#blank-project-pane') do
- expect(page).to have_select('project[namespace_id]', visible: false, selected: subgroup.full_path)
- end
- end
+ before do
+ group.add_maintainer(user)
+ visit new_project_path(namespace_id: subgroup.id)
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
end
- context 'when changing namespaces dynamically', :js do
- let(:public_group) { create(:group, :public) }
- let(:internal_group) { create(:group, :internal) }
- let(:private_group) { create(:group, :private) }
-
- before do
- public_group.add_owner(user)
- internal_group.add_owner(user)
- private_group.add_owner(user)
- visit new_project_path(namespace_id: public_group.id)
- find('[data-qa-panel-name="blank_project"]').click
- end
-
- it 'enables the correct visibility options' do
- select2(user.namespace_id, from: '#project_namespace_id')
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).not_to be_disabled
-
- select2(public_group.id, from: '#project_namespace_id')
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).not_to be_disabled
-
- select2(internal_group.id, from: '#project_namespace_id')
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).to be_disabled
-
- select2(private_group.id, from: '#project_namespace_id')
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).to be_disabled
+ it 'selects the group namespace' do
+ page.within('#blank-project-pane') do
+ expect(page).to have_select('project[namespace_id]', visible: false, selected: subgroup.full_path)
end
end
end
- context 'Import project options', :js do
+ context 'when changing namespaces dynamically', :js do
+ let(:public_group) { create(:group, :public) }
+ let(:internal_group) { create(:group, :internal) }
+ let(:private_group) { create(:group, :private) }
+
before do
- visit new_project_path
- find('[data-qa-panel-name="import_project"]').click
+ public_group.add_owner(user)
+ internal_group.add_owner(user)
+ private_group.add_owner(user)
+ visit new_project_path(namespace_id: public_group.id)
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
end
- context 'from git repository url, "Repo by URL"' do
- before do
- first('.js-import-git-toggle-button').click
- end
-
- it 'does not autocomplete sensitive git repo URL' do
- autocomplete = find('#project_import_url')['autocomplete']
-
- expect(autocomplete).to eq('off')
- end
+ it 'enables the correct visibility options' do
+ select2(user.namespace_id, from: '#project_namespace_id')
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).not_to be_disabled
+
+ select2(public_group.id, from: '#project_namespace_id')
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).not_to be_disabled
+
+ select2(internal_group.id, from: '#project_namespace_id')
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).to be_disabled
+
+ select2(private_group.id, from: '#project_namespace_id')
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).to be_disabled
+ end
+ end
+ end
- it 'shows import instructions' do
- git_import_instructions = first('.js-toggle-content')
+ context 'Import project options', :js do
+ before do
+ visit new_project_path
+ find('[data-qa-panel-name="import_project"]').click # rubocop:disable QA/SelectorUsage
+ end
- expect(git_import_instructions).to be_visible
- expect(git_import_instructions).to have_content 'Git repository URL'
- end
+ context 'from git repository url, "Repo by URL"' do
+ before do
+ first('.js-import-git-toggle-button').click
+ end
- it 'reports error if repo URL does not end with .git' do
- fill_in 'project_import_url', with: 'http://foo/bar'
- # simulate blur event
- find('body').click
+ it 'does not autocomplete sensitive git repo URL' do
+ autocomplete = find('#project_import_url')['autocomplete']
- expect(page).to have_text('A repository URL usually ends in a .git suffix')
- end
+ expect(autocomplete).to eq('off')
+ end
- it 'keeps "Import project" tab open after form validation error' do
- collision_project = create(:project, name: 'test-name-collision', namespace: user.namespace)
+ it 'shows import instructions' do
+ git_import_instructions = first('.js-toggle-content')
- fill_in 'project_import_url', with: collision_project.http_url_to_repo
- fill_in 'project_name', with: collision_project.name
+ expect(git_import_instructions).to be_visible
+ expect(git_import_instructions).to have_content 'Git repository URL'
+ end
- click_on 'Create project'
+ it 'reports error if repo URL does not end with .git' do
+ fill_in 'project_import_url', with: 'http://foo/bar'
+ # simulate blur event
+ find('body').click
- expect(page).to have_css('#import-project-pane.active')
- expect(page).not_to have_css('.toggle-import-form.hide')
- end
+ expect(page).to have_text('A repository URL usually ends in a .git suffix')
end
- context 'from GitHub' do
- before do
- first('.js-import-github').click
- end
+ it 'keeps "Import project" tab open after form validation error' do
+ collision_project = create(:project, name: 'test-name-collision', namespace: user.namespace)
- it 'shows import instructions' do
- expect(page).to have_content('Authenticate with GitHub')
- expect(current_path).to eq new_import_github_path
- end
- end
+ fill_in 'project_import_url', with: collision_project.http_url_to_repo
+ fill_in 'project_name', with: collision_project.name
- context 'from manifest file' do
- before do
- first('.import_manifest').click
- end
+ click_on 'Create project'
- it 'shows import instructions' do
- expect(page).to have_content('Manifest file import')
- expect(current_path).to eq new_import_manifest_path
- end
+ expect(page).to have_css('#import-project-pane.active')
+ expect(page).not_to have_css('.toggle-import-form.hide')
end
end
- context 'Namespace selector' do
- context 'with group with DEVELOPER_MAINTAINER_PROJECT_ACCESS project_creation_level' do
- let(:group) { create(:group, project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) }
-
- before do
- group.add_developer(user)
- visit new_project_path(namespace_id: group.id)
- find('[data-qa-panel-name="blank_project"]').click
- end
+ context 'from GitHub' do
+ before do
+ first('.js-import-github').click
+ end
- it 'selects the group namespace' do
- page.within('#blank-project-pane') do
- expect(page).to have_select('project[namespace_id]', visible: false, selected: group.full_path)
- end
- end
+ it 'shows import instructions' do
+ expect(page).to have_content('Authenticate with GitHub')
+ expect(current_path).to eq new_import_github_path
end
end
- end
- end
- context 'with combined_menu feature flag on' do
- let(:needs_rewrite_for_combined_menu_flag_on) { true }
+ context 'from manifest file' do
+ before do
+ first('.import_manifest').click
+ end
- before do
- stub_feature_flags(combined_menu: true)
+ it 'shows import instructions' do
+ expect(page).to have_content('Manifest file import')
+ expect(current_path).to eq new_import_manifest_path
+ end
+ end
end
- it_behaves_like 'combined_menu: feature flag examples'
- end
+ context 'Namespace selector' do
+ context 'with group with DEVELOPER_MAINTAINER_PROJECT_ACCESS project_creation_level' do
+ let(:group) { create(:group, project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) }
- context 'with combined_menu feature flag off' do
- let(:needs_rewrite_for_combined_menu_flag_on) { false }
+ before do
+ group.add_developer(user)
+ visit new_project_path(namespace_id: group.id)
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
+ end
- before do
- stub_feature_flags(combined_menu: false)
+ it 'selects the group namespace' do
+ page.within('#blank-project-pane') do
+ expect(page).to have_select('project[namespace_id]', visible: false, selected: group.full_path)
+ end
+ end
+ end
end
-
- it_behaves_like 'combined_menu: feature flag examples'
end
end
diff --git a/spec/features/projects/packages_spec.rb b/spec/features/projects/packages_spec.rb
index fa4c57c305d..30298f79312 100644
--- a/spec/features/projects/packages_spec.rb
+++ b/spec/features/projects/packages_spec.rb
@@ -45,6 +45,8 @@ RSpec.describe 'Packages' do
it_behaves_like 'package details link'
end
+ it_behaves_like 'package details link'
+
context 'deleting a package' do
let_it_be(:project) { create(:project) }
let_it_be(:package) { create(:package, project: project) }
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index ce2083b397a..944cee2a998 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -240,10 +240,14 @@ RSpec.describe 'Pipeline', :js do
end
end
- it 'is possible to retry the success job' do
+ it 'is possible to retry the success job', :sidekiq_might_not_need_inline do
find('#ci-badge-build .ci-action-icon-container').click
+ wait_for_requests
expect(page).not_to have_content('Retry job')
+ within('.js-pipeline-header-container') do
+ expect(page).to have_selector('.js-ci-status-icon-running')
+ end
end
end
@@ -282,10 +286,14 @@ RSpec.describe 'Pipeline', :js do
end
end
- it 'is possible to retry the failed build' do
+ it 'is possible to retry the failed build', :sidekiq_might_not_need_inline do
find('#ci-badge-test .ci-action-icon-container').click
+ wait_for_requests
expect(page).not_to have_content('Retry job')
+ within('.js-pipeline-header-container') do
+ expect(page).to have_selector('.js-ci-status-icon-running')
+ end
end
it 'includes the failure reason' do
@@ -308,10 +316,14 @@ RSpec.describe 'Pipeline', :js do
end
end
- it 'is possible to play the manual job' do
+ it 'is possible to play the manual job', :sidekiq_might_not_need_inline do
find('#ci-badge-manual-build .ci-action-icon-container').click
+ wait_for_requests
expect(page).not_to have_content('Play job')
+ within('.js-pipeline-header-container') do
+ expect(page).to have_selector('.js-ci-status-icon-running')
+ end
end
end
@@ -411,11 +423,18 @@ RSpec.describe 'Pipeline', :js do
context 'when retrying' do
before do
find('[data-testid="retryPipeline"]').click
+ wait_for_requests
end
it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
expect(page).not_to have_content('Retry')
end
+
+ it 'shows running status in pipeline header', :sidekiq_might_not_need_inline do
+ within('.js-pipeline-header-container') do
+ expect(page).to have_selector('.js-ci-status-icon-running')
+ end
+ end
end
end
@@ -437,44 +456,28 @@ RSpec.describe 'Pipeline', :js do
end
end
- shared_context 'delete pipeline' do
- context 'deleting pipeline' do
- context 'when user can not delete' do
- before do
- visit_pipeline
- end
-
- it { expect(page).not_to have_button('Delete') }
- end
-
- context 'when deleting' do
- before do
- group.add_owner(user)
-
- visit_pipeline
-
- click_button 'Delete'
- click_button 'Delete pipeline'
- end
-
- it 'redirects to pipeline overview page', :sidekiq_inline do
- expect(page).to have_content('The pipeline has been deleted')
- expect(current_path).to eq(project_pipelines_path(project))
- end
- end
+ context 'when user can not delete' do
+ before do
+ visit_pipeline
end
- end
- context 'when cancel_pipelines_prior_to_destroy is enabled' do
- include_context 'delete pipeline'
+ it { expect(page).not_to have_button('Delete') }
end
- context 'when cancel_pipelines_prior_to_destroy is disabled' do
+ context 'when deleting' do
before do
- stub_feature_flags(cancel_pipelines_prior_to_destroy: false)
+ group.add_owner(user)
+
+ visit_pipeline
+
+ click_button 'Delete'
+ click_button 'Delete pipeline'
end
- include_context 'delete pipeline'
+ it 'redirects to pipeline overview page', :sidekiq_inline do
+ expect(page).to have_content('The pipeline has been deleted')
+ expect(current_path).to eq(project_pipelines_path(project))
+ end
end
context 'when pipeline ref does not exist in repository anymore' do
@@ -775,65 +778,10 @@ RSpec.describe 'Pipeline', :js do
describe 'GET /:project/-/pipelines/:id' do
subject { visit project_pipeline_path(project, pipeline) }
- # remove when :graphql_pipeline_details flag is removed
- # https://gitlab.com/gitlab-org/gitlab/-/issues/299112
- context 'when :graphql_pipeline_details flag is off' do
- before do
- stub_feature_flags(graphql_pipeline_details: false)
- stub_feature_flags(graphql_pipeline_details_users: false)
- end
-
- it 'shows deploy job as created' do
- subject
-
- within('.pipeline-header-container') do
- expect(page).to have_content('pending')
- end
-
- within('.js-pipeline-graph') do
- within '.stage-column:nth-child(1)' do
- expect(page).to have_content('test')
- expect(page).to have_css('.ci-status-icon-pending')
- end
-
- within '.stage-column:nth-child(2)' do
- expect(page).to have_content('deploy')
- expect(page).to have_css('.ci-status-icon-created')
- end
- end
- end
-
- context 'when test job succeeded' do
- before do
- test_job.success!
- end
-
- it 'shows deploy job as pending' do
- subject
-
- within('.pipeline-header-container') do
- expect(page).to have_content('running')
- end
-
- within('.pipeline-graph') do
- within '.stage-column:nth-child(1)' do
- expect(page).to have_content('test')
- expect(page).to have_css('.ci-status-icon-success')
- end
-
- within '.stage-column:nth-child(2)' do
- expect(page).to have_content('deploy')
- expect(page).to have_css('.ci-status-icon-pending')
- end
- end
- end
- end
- end
-
it 'shows deploy job as created' do
subject
- within('.pipeline-header-container') do
+ within('.js-pipeline-header-container') do
expect(page).to have_content('pending')
end
@@ -858,7 +806,7 @@ RSpec.describe 'Pipeline', :js do
it 'shows deploy job as pending' do
subject
- within('.pipeline-header-container') do
+ within('.js-pipeline-header-container') do
expect(page).to have_content('running')
end
@@ -887,7 +835,7 @@ RSpec.describe 'Pipeline', :js do
it 'shows deploy job as waiting for resource' do
subject
- within('.pipeline-header-container') do
+ within('.js-pipeline-header-container') do
expect(page).to have_content('waiting')
end
@@ -899,29 +847,6 @@ RSpec.describe 'Pipeline', :js do
end
end
- # remove when :graphql_pipeline_details flag is removed
- # https://gitlab.com/gitlab-org/gitlab/-/issues/299112
- context 'when :graphql_pipeline_details flag is off' do
- before do
- stub_feature_flags(graphql_pipeline_details: false)
- stub_feature_flags(graphql_pipeline_details_users: false)
- end
- it 'shows deploy job as waiting for resource' do
- subject
-
- within('.pipeline-header-container') do
- expect(page).to have_content('waiting')
- end
-
- within('.pipeline-graph') do
- within '.stage-column:nth-child(2)' do
- expect(page).to have_content('deploy')
- expect(page).to have_css('.ci-status-icon-waiting-for-resource')
- end
- end
- end
- end
-
context 'when resource is released from another job' do
before do
another_job.success!
@@ -930,7 +855,7 @@ RSpec.describe 'Pipeline', :js do
it 'shows deploy job as pending' do
subject
- within('.pipeline-header-container') do
+ within('.js-pipeline-header-container') do
expect(page).to have_content('running')
end
@@ -941,29 +866,6 @@ RSpec.describe 'Pipeline', :js do
end
end
end
-
- # remove when :graphql_pipeline_details flag is removed
- # https://gitlab.com/gitlab-org/gitlab/-/issues/299112
- context 'when :graphql_pipeline_details flag is off' do
- before do
- stub_feature_flags(graphql_pipeline_details: false)
- stub_feature_flags(graphql_pipeline_details_users: false)
- end
- it 'shows deploy job as pending' do
- subject
-
- within('.pipeline-header-container') do
- expect(page).to have_content('running')
- end
-
- within('.pipeline-graph') do
- within '.stage-column:nth-child(2)' do
- expect(page).to have_content('deploy')
- expect(page).to have_css('.ci-status-icon-pending')
- end
- end
- end
- end
end
context 'when deploy job is a bridge to trigger a downstream pipeline' do
@@ -975,7 +877,7 @@ RSpec.describe 'Pipeline', :js do
it 'shows deploy job as waiting for resource' do
subject
- within('.pipeline-header-container') do
+ within('.js-pipeline-header-container') do
expect(page).to have_content('waiting')
end
@@ -997,7 +899,7 @@ RSpec.describe 'Pipeline', :js do
it 'shows deploy job as waiting for resource' do
subject
- within('.pipeline-header-container') do
+ within('.js-pipeline-header-container') do
expect(page).to have_content('waiting')
end
@@ -1231,23 +1133,6 @@ RSpec.describe 'Pipeline', :js do
expect(page).not_to have_content('Failed Jobs')
expect(page).to have_selector('.js-pipeline-graph')
end
-
- # remove when :graphql_pipeline_details flag is removed
- # https://gitlab.com/gitlab-org/gitlab/-/issues/299112
- context 'when :graphql_pipeline_details flag is off' do
- before do
- stub_feature_flags(graphql_pipeline_details: false)
- stub_feature_flags(graphql_pipeline_details_users: false)
- end
-
- it 'displays the pipeline graph' do
- subject
-
- expect(current_path).to eq(pipeline_path(pipeline))
- expect(page).not_to have_content('Failed Jobs')
- expect(page).to have_selector('.pipeline-visualization')
- end
- end
end
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 1de0eea4657..bd22c8632e4 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -12,8 +12,6 @@ RSpec.describe 'Pipelines', :js do
before do
sign_in(user)
- stub_feature_flags(graphql_pipeline_details: false)
- stub_feature_flags(graphql_pipeline_details_users: false)
project.add_developer(user)
project.update!(auto_devops_attributes: { enabled: false })
@@ -585,6 +583,26 @@ RSpec.describe 'Pipelines', :js do
expect(page).to have_selector('.gl-pagination .page-link', count: 4)
end
end
+
+ context 'with pipeline key selection' do
+ before do
+ visit project_pipelines_path(project)
+ wait_for_requests
+ end
+
+ it 'changes the Pipeline ID column for Pipeline IID' do
+ page.find('[data-testid="pipeline-key-dropdown"]').click
+
+ within '.gl-new-dropdown-contents' do
+ dropdown_options = page.find_all '.gl-new-dropdown-item'
+
+ dropdown_options[1].click
+ end
+
+ expect(page.find('[data-testid="pipeline-th"]')).to have_content 'Pipeline IID'
+ expect(page.find('[data-testid="pipeline-url-link"]')).to have_content "##{pipeline.iid}"
+ end
+ end
end
describe 'GET /:project/-/pipelines/show' do
diff --git a/spec/features/projects/services/user_activates_irker_spec.rb b/spec/features/projects/services/user_activates_irker_spec.rb
index e4d92dc30ff..004aa116bb3 100644
--- a/spec/features/projects/services/user_activates_irker_spec.rb
+++ b/spec/features/projects/services/user_activates_irker_spec.rb
@@ -2,16 +2,16 @@
require 'spec_helper'
-RSpec.describe 'User activates Irker (IRC gateway)' do
+RSpec.describe 'User activates irker (IRC gateway)' do
include_context 'project service activation'
it 'activates service', :js do
- visit_project_integration('Irker (IRC gateway)')
+ visit_project_integration('irker (IRC gateway)')
check('Colorize messages')
fill_in('Recipients', with: 'irc://chat.freenode.net/#commits')
click_test_then_save_integration(expect_test_to_fail: false)
- expect(page).to have_content('Irker (IRC gateway) settings saved and active.')
+ expect(page).to have_content('irker (IRC gateway) settings saved and active.')
end
end
diff --git a/spec/features/projects/services/user_activates_pushover_spec.rb b/spec/features/projects/services/user_activates_pushover_spec.rb
index 97003ab7c2a..d92f69e700a 100644
--- a/spec/features/projects/services/user_activates_pushover_spec.rb
+++ b/spec/features/projects/services/user_activates_pushover_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'User activates Pushover' do
fill_in('API key', with: 'verySecret')
fill_in('User key', with: 'verySecret')
fill_in('Device', with: 'myDevice')
- select('High Priority', from: 'Priority')
+ select('High priority', from: 'Priority')
select('Bike', from: 'Sound')
click_test_then_save_integration(expect_test_to_fail: false)
diff --git a/spec/features/projects/services/user_views_services_spec.rb b/spec/features/projects/services/user_views_services_spec.rb
index b936a7f38f6..201a58ba379 100644
--- a/spec/features/projects/services/user_views_services_spec.rb
+++ b/spec/features/projects/services/user_views_services_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'User views services', :js do
expect(page).to have_content('Atlassian Bamboo')
expect(page).to have_content('JetBrains TeamCity')
expect(page).to have_content('Asana')
- expect(page).to have_content('Irker (IRC gateway)')
+ expect(page).to have_content('irker (IRC gateway)')
expect(page).to have_content('Packagist')
end
end
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index 3f9f2dae453..509729d526d 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -9,12 +9,12 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
let(:container_registry_enabled) { true }
- let(:container_registry_enabled_on_project) { true }
+ let(:container_registry_enabled_on_project) { ProjectFeature::ENABLED }
subject { visit project_settings_packages_and_registries_path(project) }
before do
- project.update!(container_registry_enabled: container_registry_enabled_on_project)
+ project.project_feature.update!(container_registry_access_level: container_registry_enabled_on_project)
project.container_expiration_policy.update!(enabled: true)
sign_in(user)
@@ -104,7 +104,7 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
end
context 'when container registry is disabled on project' do
- let(:container_registry_enabled_on_project) { false }
+ let(:container_registry_enabled_on_project) { ProjectFeature::DISABLED }
it 'does not exists' do
subject
diff --git a/spec/features/projects/show/schema_markup_spec.rb b/spec/features/projects/show/schema_markup_spec.rb
index 28803db924a..8adbdb64f1b 100644
--- a/spec/features/projects/show/schema_markup_spec.rb
+++ b/spec/features/projects/show/schema_markup_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'Projects > Show > Schema Markup' do
expect(page).to have_selector('[itemprop="identifier"]', text: "Project ID: #{project.id}")
expect(page).to have_selector('[itemprop="description"]', text: project.description)
expect(page).to have_selector('[itemprop="license"]', text: project.repository.license.name)
- expect(find_all('[itemprop="keywords"]').map(&:text)).to match_array(project.topic_list.map(&:capitalize))
+ expect(find_all('[itemprop="keywords"]').map(&:text)).to match_array(project.topic_list)
expect(page).to have_selector('[itemprop="about"]')
end
end
diff --git a/spec/features/projects/show/user_sees_collaboration_links_spec.rb b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
index 613033373e8..552f068ecc7 100644
--- a/spec/features/projects/show/user_sees_collaboration_links_spec.rb
+++ b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe 'Projects > Show > Collaboration links', :js do
# The dropdown above the tree
page.within('.repo-breadcrumb') do
- find('.qa-add-to-tree').click
+ find('.qa-add-to-tree').click # rubocop:disable QA/SelectorUsage
aggregate_failures 'dropdown links above the repo tree' do
expect(page).to have_link('New file')
@@ -71,7 +71,7 @@ RSpec.describe 'Projects > Show > Collaboration links', :js do
find_new_menu_toggle.click
end
- expect(page).not_to have_selector('.qa-add-to-tree')
+ expect(page).not_to have_selector('.qa-add-to-tree') # rubocop:disable QA/SelectorUsage
expect(page).not_to have_link('Web IDE')
end
diff --git a/spec/features/projects/tags/user_edits_tags_spec.rb b/spec/features/projects/tags/user_edits_tags_spec.rb
index 7a8a685f3d9..9f66b7274e8 100644
--- a/spec/features/projects/tags/user_edits_tags_spec.rb
+++ b/spec/features/projects/tags/user_edits_tags_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe 'Project > Tags', :js do
note_textarea = page.find('.js-gfm-input')
# Click on Bold button
- page.find('.md-header-toolbar button.toolbar-btn:first-child').click
+ page.find('.md-header-toolbar button:first-child').click
expect(note_textarea.value).to eq('****')
end
diff --git a/spec/features/projects/terraform_spec.rb b/spec/features/projects/terraform_spec.rb
index d080d101285..2c63f2bfc02 100644
--- a/spec/features/projects/terraform_spec.rb
+++ b/spec/features/projects/terraform_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe 'Terraform', :js do
it 'displays a table with terraform states' do
expect(page).to have_selector(
- '[data-testid="terraform-states-table-name"]',
+ "[data-testid='terraform-states-table-name']",
count: project.terraform_states.size
)
end
@@ -64,7 +64,7 @@ RSpec.describe 'Terraform', :js do
expect(page).to have_content(additional_state.name)
find("[data-testid='terraform-state-actions-#{additional_state.name}']").click
- find('[data-testid="terraform-state-remove"]').click
+ find("[data-testid='terraform-state-remove']").click
fill_in "terraform-state-remove-input-#{additional_state.name}", with: additional_state.name
click_button 'Remove'
@@ -72,6 +72,21 @@ RSpec.describe 'Terraform', :js do
expect { additional_state.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
+
+ context 'when clicking on copy Terraform init command' do
+ it 'shows the modal with the init command' do
+ visit project_terraform_index_path(project)
+
+ expect(page).to have_content(terraform_state.name)
+
+ page.within("[data-testid='terraform-state-actions-#{terraform_state.name}']") do
+ click_button class: 'gl-dropdown-toggle'
+ click_button 'Copy Terraform init command'
+ end
+
+ expect(page).to have_content("To get access to this terraform state from your local computer, run the following command at the command line.")
+ end
+ end
end
end
@@ -87,11 +102,11 @@ RSpec.describe 'Terraform', :js do
context 'when user visits the index page' do
it 'displays a table without an action dropdown', :aggregate_failures do
expect(page).to have_selector(
- '[data-testid="terraform-states-table-name"]',
+ "[data-testid='terraform-states-table-name']",
count: project.terraform_states.size
)
- expect(page).not_to have_selector('[data-testid*="terraform-state-actions"]')
+ expect(page).not_to have_selector("[data-testid*='terraform-state-actions']")
end
end
end
diff --git a/spec/features/projects/tree/create_directory_spec.rb b/spec/features/projects/tree/create_directory_spec.rb
index e2ae858cb9b..f6127b38bd6 100644
--- a/spec/features/projects/tree/create_directory_spec.rb
+++ b/spec/features/projects/tree/create_directory_spec.rb
@@ -49,8 +49,8 @@ RSpec.describe 'Multi-file editor new directory', :js do
# Compact mode depends on the size of window. If it is shorter than MAX_WINDOW_HEIGHT_COMPACT,
# (as it is with WEBDRIVER_HEADLESS=0), this initial commit button will exist. Otherwise, if it is
# taller (as it is by default with chrome headless) then the button will not exist.
- if page.has_css?('.qa-begin-commit-button')
- find('.qa-begin-commit-button').click
+ if page.has_css?('.qa-begin-commit-button') # rubocop:disable QA/SelectorUsage
+ find('.qa-begin-commit-button').click # rubocop:disable QA/SelectorUsage
end
fill_in('commit-message', with: 'commit message ide')
diff --git a/spec/features/projects/tree/create_file_spec.rb b/spec/features/projects/tree/create_file_spec.rb
index 956b8898854..33be02a9121 100644
--- a/spec/features/projects/tree/create_file_spec.rb
+++ b/spec/features/projects/tree/create_file_spec.rb
@@ -39,8 +39,8 @@ RSpec.describe 'Multi-file editor new file', :js do
# Compact mode depends on the size of window. If it is shorter than MAX_WINDOW_HEIGHT_COMPACT,
# (as it is with WEBDRIVER_HEADLESS=0), this initial commit button will exist. Otherwise, if it is
# taller (as it is by default with chrome headless) then the button will not exist.
- if page.has_css?('.qa-begin-commit-button')
- find('.qa-begin-commit-button').click
+ if page.has_css?('.qa-begin-commit-button') # rubocop:disable QA/SelectorUsage
+ find('.qa-begin-commit-button').click # rubocop:disable QA/SelectorUsage
end
fill_in('commit-message', with: 'commit message ide')
diff --git a/spec/features/projects/tree/tree_show_spec.rb b/spec/features/projects/tree/tree_show_spec.rb
index ca9e0a23888..f8bbaa9535b 100644
--- a/spec/features/projects/tree/tree_show_spec.rb
+++ b/spec/features/projects/tree/tree_show_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'Projects tree', :js do
expect(page).to have_selector('.tree-item')
expect(page).to have_content('add tests for .gitattributes custom highlighting')
expect(page).not_to have_selector('.flash-alert')
- expect(page).not_to have_selector('[data-qa-selector="label-lfs"]', text: 'LFS')
+ expect(page).not_to have_selector('[data-qa-selector="label-lfs"]', text: 'LFS') # rubocop:disable QA/SelectorUsage
end
it 'renders tree table for a subtree without errors' do
@@ -35,7 +35,7 @@ RSpec.describe 'Projects tree', :js do
expect(page).to have_selector('.tree-item')
expect(page).to have_content('add spaces in whitespace file')
- expect(page).not_to have_selector('[data-qa-selector="label-lfs"]', text: 'LFS')
+ expect(page).not_to have_selector('[data-qa-selector="label-lfs"]', text: 'LFS') # rubocop:disable QA/SelectorUsage
expect(page).not_to have_selector('.flash-alert')
end
@@ -112,7 +112,7 @@ RSpec.describe 'Projects tree', :js do
it 'renders LFS badge on blob item' do
visit project_tree_path(project, File.join('master', 'files/lfs'))
- expect(page).to have_selector('[data-qa-selector="label-lfs"]', text: 'LFS')
+ expect(page).to have_selector('[data-qa-selector="label-lfs"]', text: 'LFS') # rubocop:disable QA/SelectorUsage
end
end
diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb
index a5b51bac747..2dc2f168896 100644
--- a/spec/features/projects/user_creates_project_spec.rb
+++ b/spec/features/projects/user_creates_project_spec.rb
@@ -8,17 +8,14 @@ RSpec.describe 'User creates a project', :js do
before do
sign_in(user)
create(:personal_key, user: user)
-
- stub_experiments(new_project_readme: :candidate)
end
it 'creates a new project' do
visit(new_project_path)
- find('[data-qa-panel-name="blank_project"]').click
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
fill_in(:project_name, with: 'Empty')
- # part of the new_project_readme experiment
expect(page).to have_checked_field 'Initialize repository with a README'
uncheck 'Initialize repository with a README'
@@ -46,7 +43,7 @@ RSpec.describe 'User creates a project', :js do
it 'creates a new project' do
visit(new_project_path)
- find('[data-qa-panel-name="blank_project"]').click
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
fill_in :project_name, with: 'A Subgroup Project'
fill_in :project_path, with: 'a-subgroup-project'
@@ -75,7 +72,7 @@ RSpec.describe 'User creates a project', :js do
it 'creates a new project' do
visit(new_project_path)
- find('[data-qa-panel-name="blank_project"]').click
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
fill_in :project_name, with: 'a-new-project'
fill_in :project_path, with: 'a-new-project'
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index a3d134d49eb..59ad7d31ea7 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'Project' do
shared_examples 'creates from template' do |template, sub_template_tab = nil|
it "is created from template", :js do
- find('[data-qa-panel-name="create_from_template"]').click
+ find('[data-qa-panel-name="create_from_template"]').click # rubocop:disable QA/SelectorUsage
find(".project-template #{sub_template_tab}").click if sub_template_tab
find("label[for=#{template.name}]").click
fill_in("project_name", with: template.name)
@@ -132,8 +132,8 @@ RSpec.describe 'Project' do
visit path
- expect(page).to have_css('.home-panel-topic-list')
- expect(page).to have_link('Topic1', href: explore_projects_path(topic: 'topic1'))
+ expect(page).to have_selector('[data-testid="project_topic_list"]')
+ expect(page).to have_link('topic1', href: explore_projects_path(topic: 'topic1'))
end
it 'shows up to 3 project topics' do
@@ -141,10 +141,10 @@ RSpec.describe 'Project' do
visit path
- expect(page).to have_css('.home-panel-topic-list')
- expect(page).to have_link('Topic1', href: explore_projects_path(topic: 'topic1'))
- expect(page).to have_link('Topic2', href: explore_projects_path(topic: 'topic2'))
- expect(page).to have_link('Topic3', href: explore_projects_path(topic: 'topic3'))
+ expect(page).to have_selector('[data-testid="project_topic_list"]')
+ expect(page).to have_link('topic1', href: explore_projects_path(topic: 'topic1'))
+ expect(page).to have_link('topic2', href: explore_projects_path(topic: 'topic2'))
+ expect(page).to have_link('topic3', href: explore_projects_path(topic: 'topic3'))
expect(page).to have_content('+ 1 more')
end
end
@@ -290,7 +290,7 @@ RSpec.describe 'Project' do
it 'has working links to submodules' do
click_link('645f6c4c')
- expect(page).to have_selector('.qa-branches-select', text: '645f6c4c82fd3f5e06f67134450a570b795e55a6')
+ expect(page).to have_selector('.qa-branches-select', text: '645f6c4c82fd3f5e06f67134450a570b795e55a6') # rubocop:disable QA/SelectorUsage
end
context 'for signed commit on default branch', :js do
diff --git a/spec/features/registrations/welcome_spec.rb b/spec/features/registrations/welcome_spec.rb
deleted file mode 100644
index 74320b69f19..00000000000
--- a/spec/features/registrations/welcome_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Welcome screen' do
- let(:user) { create(:user) }
-
- before do
- gitlab_sign_in(user)
-
- visit users_sign_up_welcome_path
- end
-
- it 'shows the email opt in' do
- select 'Software Developer', from: 'user_role'
- check 'user_email_opted_in'
- click_button 'Get started!'
-
- expect(user.reload.email_opted_in).to eq(true)
- end
-end
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index b61a769185e..22de77f7cd0 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -16,10 +16,10 @@ RSpec.describe 'Runners' do
project.add_maintainer(user)
end
- it 'user can see a button to install runners on kubernetes clusters' do
+ it 'user can see a link with instructions on how to install GitLab Runner' do
visit project_runners_path(project)
- expect(page).to have_link('Install GitLab Runner on Kubernetes', href: project_clusters_path(project))
+ expect(page).to have_link('Install GitLab Runner and ensure it\'s running.', href: "https://docs.gitlab.com/runner/install/")
end
end
@@ -343,12 +343,6 @@ RSpec.describe 'Runners' do
expect(page).to have_content 'No runners found'
end
-
- it 'user can see a link to install runners on kubernetes clusters' do
- visit group_settings_ci_cd_path(group)
-
- expect(page).to have_link('Install GitLab Runner on Kubernetes', href: group_clusters_path(group))
- end
end
context 'group with a runner' do
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index c002d199b01..8736f16b991 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -254,6 +254,7 @@ RSpec.describe 'User uses header search field', :js do
href = search_path(search: term)
href.concat("&project_id=#{project_id}") if project_id
href.concat("&group_id=#{group_id}") if group_id
+ href.concat("&nav_source=navbar")
".dropdown a[href='#{href}']"
end
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index 0309df8f32a..fb2873f1c96 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -57,6 +57,56 @@ RSpec.describe 'User page' do
end
end
+ context 'location' do
+ let_it_be(:location) { 'San Francisco, CA' }
+
+ context 'when location is set' do
+ let_it_be(:user) { create(:user, location: location) }
+
+ it 'shows location' do
+ subject
+
+ expect(page).to have_content(location)
+ end
+ end
+
+ context 'when location is not set' do
+ it 'does not show location' do
+ subject
+
+ expect(page).not_to have_content(location)
+ end
+ end
+ end
+
+ context 'timezone' do
+ let_it_be(:timezone) { 'America/Los_Angeles' }
+
+ before do
+ travel_to Time.find_zone(timezone).local(2021, 7, 20, 15, 30, 45)
+ end
+
+ context 'when timezone is set' do
+ let_it_be(:user) { create(:user, timezone: timezone) }
+
+ it 'shows local time' do
+ subject
+
+ expect(page).to have_content('3:30 PM')
+ end
+ end
+
+ context 'when timezone is invalid' do
+ let_it_be(:user) { create(:user, timezone: 'Foo/Bar') }
+
+ it 'shows local time using the configured default timezone (UTC in this case)' do
+ subject
+
+ expect(page).to have_content('10:30 PM')
+ end
+ end
+ end
+
context 'follow/unfollow and followers/following' do
let_it_be(:followee) { create(:user) }
let_it_be(:follower) { create(:user) }
@@ -228,6 +278,14 @@ RSpec.describe 'User page' do
expect(page).to have_content("(they/them)")
end
+ it 'shows the pronunciation of the user if there was one' do
+ user.user_detail.update_column(:pronunciation, 'pruh-nuhn-see-ay-shn')
+
+ subject
+
+ expect(page).to have_content("Pronounced as: pruh-nuhn-see-ay-shn")
+ end
+
context 'signup disabled' do
it 'shows the sign in link' do
stub_application_setting(signup_enabled: false)
diff --git a/spec/finders/ci/pipelines_finder_spec.rb b/spec/finders/ci/pipelines_finder_spec.rb
index 16561aa65b6..c7bd52576e8 100644
--- a/spec/finders/ci/pipelines_finder_spec.rb
+++ b/spec/finders/ci/pipelines_finder_spec.rb
@@ -252,6 +252,29 @@ RSpec.describe Ci::PipelinesFinder do
end
end
+ context 'when source is specified' do
+ let(:params) { { source: 'web' } }
+ let!(:web_pipeline) { create(:ci_pipeline, project: project, source: 'web') }
+ let!(:push_pipeline) { create(:ci_pipeline, project: project, source: 'push') }
+ let!(:api_pipeline) { create(:ci_pipeline, project: project, source: 'api') }
+
+ context 'when `pipeline_source_filter` feature flag is disabled' do
+ before do
+ stub_feature_flags(pipeline_source_filter: false)
+ end
+
+ it 'returns all the pipelines' do
+ is_expected.to contain_exactly(web_pipeline, push_pipeline, api_pipeline)
+ end
+ end
+
+ context 'when `pipeline_source_filter` feature flag is enabled' do
+ it 'returns only the matched pipeline' do
+ is_expected.to eq([web_pipeline])
+ end
+ end
+ end
+
describe 'ordering' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index 7f05947ac48..599b4ffb804 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -33,41 +33,43 @@ RSpec.describe Ci::RunnersFinder do
end
end
- context 'filter by search term' do
- it 'calls Ci::Runner.search' do
- expect(Ci::Runner).to receive(:search).with('term').and_call_original
+ context 'filtering' do
+ context 'by search term' do
+ it 'calls Ci::Runner.search' do
+ expect(Ci::Runner).to receive(:search).with('term').and_call_original
- described_class.new(current_user: admin, params: { search: 'term' }).execute
+ described_class.new(current_user: admin, params: { search: 'term' }).execute
+ end
end
- end
- context 'filter by status' do
- Ci::Runner::AVAILABLE_STATUSES.each do |status|
- it "calls the corresponding :#{status} scope on Ci::Runner" do
- expect(Ci::Runner).to receive(status.to_sym).and_call_original
+ context 'by status' do
+ Ci::Runner::AVAILABLE_STATUSES.each do |status|
+ it "calls the corresponding :#{status} scope on Ci::Runner" do
+ expect(Ci::Runner).to receive(status.to_sym).and_call_original
- described_class.new(current_user: admin, params: { status_status: status }).execute
+ described_class.new(current_user: admin, params: { status_status: status }).execute
+ end
end
end
- end
- context 'filter by runner type' do
- it 'calls the corresponding scope on Ci::Runner' do
- expect(Ci::Runner).to receive(:project_type).and_call_original
+ context 'by runner type' do
+ it 'calls the corresponding scope on Ci::Runner' do
+ expect(Ci::Runner).to receive(:project_type).and_call_original
- described_class.new(current_user: admin, params: { type_type: 'project_type' }).execute
+ described_class.new(current_user: admin, params: { type_type: 'project_type' }).execute
+ end
end
- end
- context 'filter by tag_name' do
- it 'calls the corresponding scope on Ci::Runner' do
- expect(Ci::Runner).to receive(:tagged_with).with(%w[tag1 tag2]).and_call_original
+ context 'by tag_name' do
+ it 'calls the corresponding scope on Ci::Runner' do
+ expect(Ci::Runner).to receive(:tagged_with).with(%w[tag1 tag2]).and_call_original
- described_class.new(current_user: admin, params: { tag_name: %w[tag1 tag2] }).execute
+ described_class.new(current_user: admin, params: { tag_name: %w[tag1 tag2] }).execute
+ end
end
end
- context 'sort' do
+ context 'sorting' do
let_it_be(:runner1) { create :ci_runner, created_at: '2018-07-12 07:00', contacted_at: 1.minute.ago }
let_it_be(:runner2) { create :ci_runner, created_at: '2018-07-12 08:00', contacted_at: 3.minutes.ago }
let_it_be(:runner3) { create :ci_runner, created_at: '2018-07-12 09:00', contacted_at: 2.minutes.ago }
@@ -121,7 +123,7 @@ RSpec.describe Ci::RunnersFinder do
end
end
- context 'non admin user' do
+ context 'by non admin user' do
it 'returns no runners' do
user = create :user
create :ci_runner, active: true
@@ -131,7 +133,7 @@ RSpec.describe Ci::RunnersFinder do
end
end
- context 'user is nil' do
+ context 'when user is nil' do
it 'returns no runners' do
user = nil
create :ci_runner, active: true
@@ -182,85 +184,69 @@ RSpec.describe Ci::RunnersFinder do
describe '#execute' do
subject { described_class.new(current_user: user, group: group, params: params).execute }
- context 'no params' do
+ context 'with user as group owner' do
before do
group.add_owner(user)
end
- it 'returns all runners' do
- expect(subject).to eq([runner_project_7, runner_project_6, runner_project_5,
- runner_project_4, runner_project_3, runner_project_2,
- runner_project_1, runner_sub_group_4, runner_sub_group_3,
- runner_sub_group_2, runner_sub_group_1, runner_group])
- end
- end
-
- context 'with sort param' do
- let(:params) { { sort: 'contacted_asc' } }
-
- before do
- group.add_owner(user)
- end
-
- it 'sorts by specified attribute' do
- expect(subject).to eq([runner_group, runner_sub_group_1, runner_sub_group_2,
- runner_sub_group_3, runner_sub_group_4, runner_project_1,
- runner_project_2, runner_project_3, runner_project_4,
- runner_project_5, runner_project_6, runner_project_7])
+ context 'passing no params' do
+ it 'returns all descendant runners' do
+ expect(subject).to eq([runner_project_7, runner_project_6, runner_project_5,
+ runner_project_4, runner_project_3, runner_project_2,
+ runner_project_1, runner_sub_group_4, runner_sub_group_3,
+ runner_sub_group_2, runner_sub_group_1, runner_group])
+ end
end
- end
-
- context 'filter by search term' do
- let(:params) { { search: 'runner_project_search' } }
- before do
- group.add_owner(user)
- end
+ context 'with sort param' do
+ let(:params) { { sort: 'contacted_asc' } }
- it 'returns correct runner' do
- expect(subject).to eq([runner_project_3])
+ it 'sorts by specified attribute' do
+ expect(subject).to eq([runner_group, runner_sub_group_1, runner_sub_group_2,
+ runner_sub_group_3, runner_sub_group_4, runner_project_1,
+ runner_project_2, runner_project_3, runner_project_4,
+ runner_project_5, runner_project_6, runner_project_7])
+ end
end
- end
- context 'filter by status' do
- let(:params) { { status_status: 'paused' } }
+ context 'filtering' do
+ context 'by search term' do
+ let(:params) { { search: 'runner_project_search' } }
- before do
- group.add_owner(user)
- end
-
- it 'returns correct runner' do
- expect(subject).to eq([runner_sub_group_1])
- end
- end
+ it 'returns correct runner' do
+ expect(subject).to eq([runner_project_3])
+ end
+ end
- context 'filter by tag_name' do
- let(:params) { { tag_name: %w[runner_tag] } }
+ context 'by status' do
+ let(:params) { { status_status: 'paused' } }
- before do
- group.add_owner(user)
- end
+ it 'returns correct runner' do
+ expect(subject).to eq([runner_sub_group_1])
+ end
+ end
- it 'returns correct runner' do
- expect(subject).to eq([runner_project_5])
- end
- end
+ context 'by tag_name' do
+ let(:params) { { tag_name: %w[runner_tag] } }
- context 'filter by runner type' do
- let(:params) { { type_type: 'project_type' } }
+ it 'returns correct runner' do
+ expect(subject).to eq([runner_project_5])
+ end
+ end
- before do
- group.add_owner(user)
- end
+ context 'by runner type' do
+ let(:params) { { type_type: 'project_type' } }
- it 'returns correct runners' do
- expect(subject).to eq([runner_project_7, runner_project_6,
- runner_project_5, runner_project_4,
- runner_project_3, runner_project_2, runner_project_1])
+ it 'returns correct runners' do
+ expect(subject).to eq([runner_project_7, runner_project_6,
+ runner_project_5, runner_project_4,
+ runner_project_3, runner_project_2, runner_project_1])
+ end
+ end
end
end
- context 'user has no access to runners' do
+ context 'when user is not group owner' do
where(:user_permission) do
[:maintainer, :developer, :reporter, :guest]
end
@@ -276,13 +262,13 @@ RSpec.describe Ci::RunnersFinder do
end
end
- context 'user with no access' do
+ context 'when user has no access' do
it 'returns no runners' do
expect(subject).to be_empty
end
end
- context 'user is nil' do
+ context 'when user is nil' do
let_it_be(:user) { nil }
it 'returns no runners' do
@@ -294,7 +280,7 @@ RSpec.describe Ci::RunnersFinder do
describe '#sort_key' do
subject { described_class.new(current_user: user, group: group, params: params).sort_key }
- context 'no params' do
+ context 'without params' do
it 'returns created_at_desc' do
expect(subject).to eq('created_at_desc')
end
diff --git a/spec/finders/error_tracking/errors_finder_spec.rb b/spec/finders/error_tracking/errors_finder_spec.rb
new file mode 100644
index 00000000000..2df5f1653e0
--- /dev/null
+++ b/spec/finders/error_tracking/errors_finder_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ErrorTracking::ErrorsFinder do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.creator }
+ let_it_be(:error) { create(:error_tracking_error, project: project) }
+ let_it_be(:error_resolved) { create(:error_tracking_error, :resolved, project: project) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe '#execute' do
+ let(:params) { {} }
+
+ subject { described_class.new(user, project, params).execute }
+
+ it { is_expected.to contain_exactly(error, error_resolved) }
+
+ context 'with status parameter' do
+ let(:params) { { status: 'resolved' } }
+
+ it { is_expected.to contain_exactly(error_resolved) }
+ end
+ end
+end
diff --git a/spec/finders/group_members_finder_spec.rb b/spec/finders/group_members_finder_spec.rb
index 3238f6744f7..0d797b7923c 100644
--- a/spec/finders/group_members_finder_spec.rb
+++ b/spec/finders/group_members_finder_spec.rb
@@ -38,6 +38,12 @@ RSpec.describe GroupMembersFinder, '#execute' do
}
end
+ it 'raises an error if a non-supported relation type is used' do
+ expect do
+ described_class.new(group).execute(include_relations: [:direct, :invalid_relation_type])
+ end.to raise_error(ArgumentError, "invalid_relation_type is not a valid relation type. Valid relation types are direct, inherited, descendants.")
+ end
+
using RSpec::Parameterized::TableSyntax
where(:subject_relations, :subject_group, :expected_members) do
diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb
index 481e2983dd7..10a08d7326e 100644
--- a/spec/finders/groups_finder_spec.rb
+++ b/spec/finders/groups_finder_spec.rb
@@ -229,5 +229,36 @@ RSpec.describe GroupsFinder do
end
end
end
+
+ context 'with search' do
+ let_it_be(:parent_group) { create(:group, :public, name: 'Parent Group') }
+ let_it_be(:test_group) { create(:group, :public, path: 'test-path') }
+
+ it 'returns all groups with matching title' do
+ expect(described_class.new(user, { search: 'parent' }).execute).to contain_exactly(parent_group)
+ end
+
+ it 'returns all groups with matching path' do
+ expect(described_class.new(user, { search: 'test' }).execute).to contain_exactly(test_group)
+ end
+
+ it 'does not search in full path if parent is set' do
+ matching_subgroup = create(:group, parent: parent_group, path: "#{parent_group.path}-subgroup")
+
+ expect(described_class.new(user, { search: 'parent', parent: parent_group }).execute).to contain_exactly(matching_subgroup)
+ end
+
+ context 'with group descendants' do
+ let_it_be(:sub_group) { create(:group, :public, name: 'Sub Group', parent: parent_group) }
+
+ let(:params) { { search: parent_group.path } }
+
+ it 'searches in full path if descendant groups are not included' do
+ params[:include_parent_descendants] = false
+
+ expect(described_class.new(user, params).execute).to contain_exactly(parent_group, sub_group)
+ end
+ end
+ end
end
end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 1c8c2af8e03..0cb73f3da6d 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -789,7 +789,7 @@ RSpec.describe IssuesFinder do
context 'user filters confidential issues' do
let(:params) { { confidential: true } }
- it 'returns only confdential issues' do
+ it 'returns only confidential issues' do
expect(issues).to contain_exactly(confidential_issue)
end
end
@@ -797,7 +797,7 @@ RSpec.describe IssuesFinder do
context 'user filters only public issues' do
let(:params) { { confidential: false } }
- it 'returns only confdential issues' do
+ it 'returns only public issues' do
expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5)
end
end
@@ -1004,9 +1004,38 @@ RSpec.describe IssuesFinder do
let(:guest) { create(:user) }
let_it_be(:authorized_user) { create(:user) }
+ let_it_be(:banned_user) { create(:user, :banned) }
let_it_be(:project) { create(:project, namespace: authorized_user.namespace) }
let_it_be(:public_issue) { create(:issue, project: project) }
let_it_be(:confidential_issue) { create(:issue, project: project, confidential: true) }
+ let_it_be(:hidden_issue) { create(:issue, project: project, author: banned_user) }
+
+ shared_examples 'returns public, does not return hidden or confidential' do
+ it 'returns only public issues' do
+ expect(subject).to include(public_issue)
+ expect(subject).not_to include(confidential_issue, hidden_issue)
+ end
+ end
+
+ shared_examples 'returns public and confidential, does not return hidden' do
+ it 'returns only public and confidential issues' do
+ expect(subject).to include(public_issue, confidential_issue)
+ expect(subject).not_to include(hidden_issue)
+ end
+ end
+
+ shared_examples 'returns public and hidden, does not return confidential' do
+ it 'returns only public and hidden issues' do
+ expect(subject).to include(public_issue, hidden_issue)
+ expect(subject).not_to include(confidential_issue)
+ end
+ end
+
+ shared_examples 'returns public, confidential, and hidden' do
+ it 'returns all issues' do
+ expect(subject).to include(public_issue, confidential_issue, hidden_issue)
+ end
+ end
context 'when no project filter is given' do
let(:params) { {} }
@@ -1014,18 +1043,28 @@ RSpec.describe IssuesFinder do
context 'for an anonymous user' do
subject { described_class.new(nil, params).with_confidentiality_access_check }
- it 'returns only public issues' do
- expect(subject).to include(public_issue)
- expect(subject).not_to include(confidential_issue)
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
end
end
context 'for a user without project membership' do
subject { described_class.new(user, params).with_confidentiality_access_check }
- it 'returns only public issues' do
- expect(subject).to include(public_issue)
- expect(subject).not_to include(confidential_issue)
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
end
end
@@ -1036,17 +1075,28 @@ RSpec.describe IssuesFinder do
project.add_guest(guest)
end
- it 'returns only public issues' do
- expect(subject).to include(public_issue)
- expect(subject).not_to include(confidential_issue)
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
end
end
context 'for a project member with access to view confidential issues' do
subject { described_class.new(authorized_user, params).with_confidentiality_access_check }
- it 'returns all issues' do
- expect(subject).to include(public_issue, confidential_issue)
+ it_behaves_like 'returns public and confidential, does not return hidden'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public, confidential, and hidden'
end
end
@@ -1056,15 +1106,26 @@ RSpec.describe IssuesFinder do
subject { described_class.new(admin_user, params).with_confidentiality_access_check }
context 'when admin mode is enabled', :enable_admin_mode do
- it 'returns all issues' do
- expect(subject).to include(public_issue, confidential_issue)
+ it_behaves_like 'returns public, confidential, and hidden'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public, confidential, and hidden'
end
end
context 'when admin mode is disabled' do
- it 'returns only public issues' do
- expect(subject).to include(public_issue)
- expect(subject).not_to include(confidential_issue)
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
end
end
end
@@ -1076,14 +1137,18 @@ RSpec.describe IssuesFinder do
context 'for an anonymous user' do
subject { described_class.new(nil, params).with_confidentiality_access_check }
- it 'returns only public issues' do
- expect(subject).to include(public_issue)
- expect(subject).not_to include(confidential_issue)
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
end
it 'does not filter by confidentiality' do
expect(Issue).not_to receive(:where).with(a_string_matching('confidential'), anything)
-
subject
end
end
@@ -1091,9 +1156,14 @@ RSpec.describe IssuesFinder do
context 'for a user without project membership' do
subject { described_class.new(user, params).with_confidentiality_access_check }
- it 'returns only public issues' do
- expect(subject).to include(public_issue)
- expect(subject).not_to include(confidential_issue)
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
end
it 'filters by confidentiality' do
@@ -1108,9 +1178,14 @@ RSpec.describe IssuesFinder do
project.add_guest(guest)
end
- it 'returns only public issues' do
- expect(subject).to include(public_issue)
- expect(subject).not_to include(confidential_issue)
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
end
it 'filters by confidentiality' do
@@ -1121,8 +1196,14 @@ RSpec.describe IssuesFinder do
context 'for a project member with access to view confidential issues' do
subject { described_class.new(authorized_user, params).with_confidentiality_access_check }
- it 'returns all issues' do
- expect(subject).to include(public_issue, confidential_issue)
+ it_behaves_like 'returns public and confidential, does not return hidden'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public, confidential, and hidden'
end
it 'does not filter by confidentiality' do
@@ -1138,8 +1219,14 @@ RSpec.describe IssuesFinder do
subject { described_class.new(admin_user, params).with_confidentiality_access_check }
context 'when admin mode is enabled', :enable_admin_mode do
- it 'returns all issues' do
- expect(subject).to include(public_issue, confidential_issue)
+ it_behaves_like 'returns public, confidential, and hidden'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public, confidential, and hidden'
end
it 'does not filter by confidentiality' do
@@ -1150,9 +1237,14 @@ RSpec.describe IssuesFinder do
end
context 'when admin mode is disabled' do
- it 'returns only public issues' do
- expect(subject).to include(public_issue)
- expect(subject).not_to include(confidential_issue)
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
end
it 'filters by confidentiality' do
diff --git a/spec/finders/lfs_pointers_finder_spec.rb b/spec/finders/lfs_pointers_finder_spec.rb
new file mode 100644
index 00000000000..2f45f383f2f
--- /dev/null
+++ b/spec/finders/lfs_pointers_finder_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe LfsPointersFinder do
+ subject(:finder) { described_class.new(repository, path) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository }
+
+ let(:path) { nil }
+
+ describe '#execute' do
+ subject { finder.execute }
+
+ let(:expected_blob_id) { '0c304a93cb8430108629bbbcaa27db3343299bc0' }
+
+ context 'when path has no LFS files' do
+ it { is_expected.to eq([]) }
+ end
+
+ context 'when path points to LFS file' do
+ let(:path) { 'files/lfs/lfs_object.iso' }
+
+ it 'returns LFS blob ids' do
+ is_expected.to eq([expected_blob_id])
+ end
+ end
+
+ context 'when path points to directory with LFS files' do
+ let(:path) { 'files/lfs/' }
+
+ it 'returns LFS blob ids' do
+ is_expected.to eq([expected_blob_id])
+ end
+ end
+
+ context 'when repository is empty' do
+ let(:project) { create(:project, :empty_repo) }
+
+ it { is_expected.to eq([]) }
+ end
+ end
+end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index c2ea918449c..49b29cefb9b 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -317,18 +317,6 @@ RSpec.describe MergeRequestsFinder do
)
end
- context 'when merge_request_draft_filter is disabled' do
- it 'does not include draft merge requests' do
- stub_feature_flags(merge_request_draft_filter: false)
-
- merge_requests = described_class.new(user, { draft_param_key => 'yes' }).execute
-
- expect(merge_requests).to contain_exactly(
- merge_request4, merge_request5, wip_merge_request1, wip_merge_request2, wip_merge_request3, wip_merge_request4
- )
- end
- end
-
it "filters by not #{draft_param_key}" do
params = { draft_param_key => 'no' }
diff --git a/spec/finders/packages/pypi/packages_finder_spec.rb b/spec/finders/packages/pypi/packages_finder_spec.rb
index a69c2317261..1a44fb99009 100644
--- a/spec/finders/packages/pypi/packages_finder_spec.rb
+++ b/spec/finders/packages/pypi/packages_finder_spec.rb
@@ -14,14 +14,14 @@ RSpec.describe Packages::Pypi::PackagesFinder do
let(:package_name) { package2.name }
- describe 'execute!' do
- subject { described_class.new(user, scope, package_name: package_name).execute! }
+ describe 'execute' do
+ subject { described_class.new(user, scope, package_name: package_name).execute }
shared_examples 'when no package is found' do
context 'non-existing package' do
let(:package_name) { 'none' }
- it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+ it { expect(subject).to be_empty }
end
end
@@ -29,7 +29,7 @@ RSpec.describe Packages::Pypi::PackagesFinder do
context 'non-existing package' do
let(:package_name) { package2.name.upcase.tr('-', '.') }
- it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+ it { expect(subject).to be_empty }
end
end
@@ -45,7 +45,7 @@ RSpec.describe Packages::Pypi::PackagesFinder do
context 'within a group' do
let(:scope) { group }
- it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+ it { expect(subject).to be_empty }
context 'user with access to only one project' do
before do
diff --git a/spec/finders/projects/members/effective_access_level_per_user_finder_spec.rb b/spec/finders/projects/members/effective_access_level_per_user_finder_spec.rb
new file mode 100644
index 00000000000..3872938d20e
--- /dev/null
+++ b/spec/finders/projects/members/effective_access_level_per_user_finder_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Projects::Members::EffectiveAccessLevelPerUserFinder, '#execute' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:user) { create(:user) }
+
+ # The result set is being converted to json just for the ease of testing.
+ subject { described_class.new(project, user).execute.as_json }
+
+ context 'a combination of all possible avenues of membership' do
+ let_it_be(:another_user) { create(:user) }
+ let_it_be(:shared_with_group) { create(:group) }
+
+ before do
+ create(:project_group_link, :maintainer, project: project, group: shared_with_group)
+ create(:group_group_link, :reporter, shared_group: project.group, shared_with_group: shared_with_group)
+
+ shared_with_group.add_maintainer(user)
+ shared_with_group.add_maintainer(another_user)
+ group.add_guest(user)
+ group.add_guest(another_user)
+ project.add_developer(user)
+ project.add_developer(another_user)
+ end
+
+ it 'includes the highest access level from all avenues of memberships for the specific user alone' do
+ expect(subject).to eq(
+ [{
+ 'user_id' => user.id,
+ 'access_level' => Gitlab::Access::MAINTAINER, # From project_group_link
+ 'id' => nil
+ }]
+ )
+ end
+ end
+end
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_details.json b/spec/fixtures/api/schemas/graphql/packages/package_details.json
index 3dfe6712b75..9e8bf7c52d0 100644
--- a/spec/fixtures/api/schemas/graphql/packages/package_details.json
+++ b/spec/fixtures/api/schemas/graphql/packages/package_details.json
@@ -99,6 +99,48 @@
"status": {
"type": ["string"],
"enum": ["DEFAULT", "HIDDEN", "PROCESSING", "ERROR"]
+ },
+ "dependencyLinks": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "pageInfo": { "type": "object" },
+ "edges": { "type": "array" },
+ "nodes": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "dependencyType": {
+ "type": "string"
+ },
+ "dependency": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "versionPattern": {
+ "type": "string"
+ }
+ }
+ },
+ "metadata": {
+ "anyOf": [
+ { "$ref": "./package_nuget_dependency_link_metadata.json" },
+ { "type": "null" }
+ ]
+ }
+ }
+ }
+ }
+ }
}
}
}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_nuget_dependency_link_metadata.json b/spec/fixtures/api/schemas/graphql/packages/package_nuget_dependency_link_metadata.json
new file mode 100644
index 00000000000..0738354528e
--- /dev/null
+++ b/spec/fixtures/api/schemas/graphql/packages/package_nuget_dependency_link_metadata.json
@@ -0,0 +1,12 @@
+{
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "targetFramework": {
+ "type": "string"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/pipeline_schedule.json b/spec/fixtures/api/schemas/pipeline_schedule.json
index 8a175ba081f..cdb4aea76da 100644
--- a/spec/fixtures/api/schemas/pipeline_schedule.json
+++ b/spec/fixtures/api/schemas/pipeline_schedule.json
@@ -18,6 +18,7 @@
"sha": { "type": "string" },
"ref": { "type": "string" },
"status": { "type": "string" },
+ "source": { "type": "string" },
"web_url": { "type": ["string", "null"] },
"created_at": { "type": ["string", "null"], "format": "date-time" },
"updated_at": { "type": ["string", "null"], "format": "date-time" }
diff --git a/spec/fixtures/api/schemas/public_api/v4/environments.json b/spec/fixtures/api/schemas/public_api/v4/environments.json
new file mode 100644
index 00000000000..f739c06f604
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/environments.json
@@ -0,0 +1,9 @@
+{
+  "type": "array",
+  "items": {
+    "type": "object",
+    "allOf": [
+      { "$ref": "./environment.json" }
+    ]
+  }
+}
diff --git a/spec/fixtures/emails/no_content_with_quote.eml b/spec/fixtures/emails/no_content_with_quote.eml
new file mode 100644
index 00000000000..e2e86c2ea4c
--- /dev/null
+++ b/spec/fixtures/emails/no_content_with_quote.eml
@@ -0,0 +1,23 @@
+Return-Path: <jake@adventuretime.ooo>
+Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <reply+59d8df8370b7e95c5a49fbf86aeb2c93@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <reply+59d8df8370b7e95c5a49fbf86aeb2c93@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+Date: Thu, 13 Jun 2013 17:03:48 -0400
+From: Jake the Dog <jake@adventuretime.ooo>
+To: reply+59d8df8370b7e95c5a49fbf86aeb2c93@appmail.adventuretime.ooo
+Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+Subject: re: [Discourse Meta] eviltrout posted in 'Adventure Time Sux'
+Mime-Version: 1.0
+Content-Type: text/plain;
+ charset=ISO-8859-1
+Content-Transfer-Encoding: 7bit
+X-Sieve: CMU Sieve 2.2
+X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
+ 13 Jun 2013 14:03:48 -0700 (PDT)
+X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
+
+--
+> quote line 1
+> quote line 2
+> quote line 3
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
index f694e617320..8495d983d10 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
@@ -12,7 +12,8 @@ milestone: "13.9"
introduced_by_url:
time_frame: 7d
data_source:
-data_category: Operational
+data_category: operational
+performance_indicator_type:
distribution:
- ce
# Add here corresponding tiers
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
index 9de4d2a5644..82e9af5b04f 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
@@ -12,7 +12,8 @@ milestone: "13.9"
introduced_by_url:
time_frame: 7d
data_source:
-data_category: Optional
+data_category: optional
+performance_indicator_type:
distribution:
- ee
tier:
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
index 0e7de369c82..aad7dc76290 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
@@ -13,7 +13,8 @@ milestone: "13.9"
introduced_by_url:
time_frame: 7d
data_source:
-data_category: Optional
+data_category: optional
+performance_indicator_type:
distribution:
- ce
- ee
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_database_metric.rb b/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_database_metric.rb
new file mode 100644
index 00000000000..abbe9a8c332
--- /dev/null
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_database_metric.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Usage
+ module Metrics
+ module Instrumentations
+ class CountFooMetric < DatabaseMetric
+ operation :count
+
+ relation do
+ # Insert ActiveRecord relation here
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_metric.rb b/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_generic_metric.rb
index 9816ff7c9eb..df6152a5659 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_metric.rb
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_generic_metric.rb
@@ -4,8 +4,9 @@ module Gitlab
module Usage
module Metrics
module Instrumentations
- class CountFooMetric < RedisHLLMetric
- def value
+ class CountFooMetric < GenericMetric
+ value do
+ # Insert metric code logic here
end
end
end
diff --git a/spec/fixtures/packages/debian/distribution/InRelease b/spec/fixtures/packages/debian/distribution/InRelease
new file mode 100644
index 00000000000..49eb6156e39
--- /dev/null
+++ b/spec/fixtures/packages/debian/distribution/InRelease
@@ -0,0 +1,8 @@
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA256
+
+Codename: fixture-distribution
+-----BEGIN PGP SIGNATURE-----
+
+ABC
+-----END PGP SIGNATURE-----
diff --git a/spec/fixtures/private_key.asc b/spec/fixtures/private_key.asc
new file mode 100644
index 00000000000..d9252bafd62
--- /dev/null
+++ b/spec/fixtures/private_key.asc
@@ -0,0 +1,17 @@
+-----BEGIN PGP PRIVATE KEY BLOCK-----
+Comment: Alice's OpenPGP Transferable Secret Key
+Comment: https://www.ietf.org/id/draft-bre-openpgp-samples-01.html
+
+lFgEXEcE6RYJKwYBBAHaRw8BAQdArjWwk3FAqyiFbFBKT4TzXcVBqPTB3gmzlC/U
+b7O1u10AAP9XBeW6lzGOLx7zHH9AsUDUTb2pggYGMzd0P3ulJ2AfvQ4RtCZBbGlj
+ZSBMb3ZlbGFjZSA8YWxpY2VAb3BlbnBncC5leGFtcGxlPoiQBBMWCAA4AhsDBQsJ
+CAcCBhUKCQgLAgQWAgMBAh4BAheAFiEE64W7X6M6deFelE5j8jFVDE9H444FAl2l
+nzoACgkQ8jFVDE9H447pKwD6A5xwUqIDprBzrHfahrImaYEZzncqb25vkLV2arYf
+a78A/R3AwtLQvjxwLDuzk4dUtUwvUYibL2sAHwj2kGaHnfICnF0EXEcE6RIKKwYB
+BAGXVQEFAQEHQEL/BiGtq0k84Km1wqQw2DIikVYrQrMttN8d7BPfnr4iAwEIBwAA
+/3/xFPG6U17rhTuq+07gmEvaFYKfxRB6sgAYiW6TMTpQEK6IeAQYFggAIBYhBOuF
+u1+jOnXhXpROY/IxVQxPR+OOBQJcRwTpAhsMAAoJEPIxVQxPR+OOWdABAMUdSzpM
+hzGs1O0RkWNQWbUzQ8nUOeD9wNbjE3zR+yfRAQDbYqvtWQKN4AQLTxVJN5X5AWyb
+Pnn+We1aTBhaGa86AQ==
+=n8OM
+-----END PGP PRIVATE KEY BLOCK-----
diff --git a/spec/fixtures/public_key.asc b/spec/fixtures/public_key.asc
new file mode 100644
index 00000000000..68fdb39324e
--- /dev/null
+++ b/spec/fixtures/public_key.asc
@@ -0,0 +1,15 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Comment: Alice's OpenPGP certificate
+Comment: https://www.ietf.org/id/draft-bre-openpgp-samples-01.html
+
+mDMEXEcE6RYJKwYBBAHaRw8BAQdArjWwk3FAqyiFbFBKT4TzXcVBqPTB3gmzlC/U
+b7O1u120JkFsaWNlIExvdmVsYWNlIDxhbGljZUBvcGVucGdwLmV4YW1wbGU+iJAE
+ExYIADgCGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AWIQTrhbtfozp14V6UTmPy
+MVUMT0fjjgUCXaWfOgAKCRDyMVUMT0fjjukrAPoDnHBSogOmsHOsd9qGsiZpgRnO
+dypvbm+QtXZqth9rvwD9HcDC0tC+PHAsO7OTh1S1TC9RiJsvawAfCPaQZoed8gK4
+OARcRwTpEgorBgEEAZdVAQUBAQdAQv8GIa2rSTzgqbXCpDDYMiKRVitCsy203x3s
+E9+eviIDAQgHiHgEGBYIACAWIQTrhbtfozp14V6UTmPyMVUMT0fjjgUCXEcE6QIb
+DAAKCRDyMVUMT0fjjlnQAQDFHUs6TIcxrNTtEZFjUFm1M0PJ1Dng/cDW4xN80fsn
+0QEA22Kr7VkCjeAEC08VSTeV+QFsmz55/lntWkwYWhmvOgE=
+=iIGO
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/spec/fixtures/security_reports/deprecated/gl-sast-report.json b/spec/fixtures/security_reports/deprecated/gl-sast-report.json
new file mode 100644
index 00000000000..2f7e47281e2
--- /dev/null
+++ b/spec/fixtures/security_reports/deprecated/gl-sast-report.json
@@ -0,0 +1,964 @@
+[
+ {
+ "category": "sast",
+ "message": "Probable insecure usage of temp file/directory.",
+ "cve": "python/hardcoded/hardcoded-tmp.py:52865813c884a507be1f152d654245af34aba8a391626d01f1ab6d3f52ec8779:B108",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "start_line": 1,
+ "end_line": 1
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B108",
+ "value": "B108",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "line": 1,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html",
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "name": "Predictable pseudorandom number generator",
+ "message": "Predictable pseudorandom number generator",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:47:PREDICTABLE_RANDOM",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
+ },
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 47,
+ "end_line": 47,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "generateSecretToken2"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-PREDICTABLE_RANDOM",
+ "value": "PREDICTABLE_RANDOM",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
+ }
+ ],
+ "priority": "Medium",
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "line": 47,
+ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM",
+ "tool": "find_sec_bugs"
+ },
+ {
+ "category": "sast",
+ "name": "Predictable pseudorandom number generator",
+ "message": "Predictable pseudorandom number generator",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:41:PREDICTABLE_RANDOM",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
+ },
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 41,
+ "end_line": 41,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "generateSecretToken1"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-PREDICTABLE_RANDOM",
+ "value": "PREDICTABLE_RANDOM",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
+ }
+ ],
+ "priority": "Medium",
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "line": 41,
+ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM",
+ "tool": "find_sec_bugs"
+ },
+ {
+ "category": "sast",
+ "message": "Use of insecure MD2, MD4, or MD5 hash function.",
+ "cve": "python/imports/imports-aliases.py:cb203b465dffb0cb3a8e8bd8910b84b93b0a5995a938e4b903dbb0cd6ffa1254:B303",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 11,
+ "end_line": 11
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B303",
+ "value": "B303"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/imports/imports-aliases.py",
+ "line": 11,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Use of insecure MD2, MD4, or MD5 hash function.",
+ "cve": "python/imports/imports-aliases.py:a7173c43ae66bd07466632d819d450e0071e02dbf782763640d1092981f9631b:B303",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 12,
+ "end_line": 12
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B303",
+ "value": "B303"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/imports/imports-aliases.py",
+ "line": 12,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Use of insecure MD2, MD4, or MD5 hash function.",
+ "cve": "python/imports/imports-aliases.py:017017b77deb0b8369b6065947833eeea752a92ec8a700db590fece3e934cf0d:B303",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 13,
+ "end_line": 13
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B303",
+ "value": "B303"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/imports/imports-aliases.py",
+ "line": 13,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Use of insecure MD2, MD4, or MD5 hash function.",
+ "cve": "python/imports/imports-aliases.py:45fc8c53aea7b84f06bc4e590cc667678d6073c4c8a1d471177ca2146fb22db2:B303",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 14,
+ "end_line": 14
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B303",
+ "value": "B303"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/imports/imports-aliases.py",
+ "line": 14,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Pickle library appears to be in use, possible security issue.",
+ "cve": "python/imports/imports-aliases.py:5f200d47291e7bbd8352db23019b85453ca048dd98ea0c291260fa7d009963a4:B301",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 15,
+ "end_line": 15
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B301",
+ "value": "B301"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/imports/imports-aliases.py",
+ "line": 15,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "name": "ECB mode is insecure",
+ "message": "ECB mode is insecure",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:29:ECB_MODE",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
+ },
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 29,
+ "end_line": 29,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "insecureCypher"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-ECB_MODE",
+ "value": "ECB_MODE",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE"
+ }
+ ],
+ "priority": "Medium",
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "line": 29,
+ "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE",
+ "tool": "find_sec_bugs"
+ },
+ {
+ "category": "sast",
+ "name": "Cipher with no integrity",
+ "message": "Cipher with no integrity",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:29:CIPHER_INTEGRITY",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
+ },
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 29,
+ "end_line": 29,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "insecureCypher"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-CIPHER_INTEGRITY",
+ "value": "CIPHER_INTEGRITY",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#CIPHER_INTEGRITY"
+ }
+ ],
+ "priority": "Medium",
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "line": 29,
+ "url": "https://find-sec-bugs.github.io/bugs.htm#CIPHER_INTEGRITY",
+ "tool": "find_sec_bugs"
+ },
+ {
+ "category": "sast",
+ "message": "Probable insecure usage of temp file/directory.",
+ "cve": "python/hardcoded/hardcoded-tmp.py:63dd4d626855555b816985d82c4614a790462a0a3ada89dc58eb97f9c50f3077:B108",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "start_line": 14,
+ "end_line": 14
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B108",
+ "value": "B108",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "line": 14,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html",
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Probable insecure usage of temp file/directory.",
+ "cve": "python/hardcoded/hardcoded-tmp.py:4ad6d4c40a8c263fc265f3384724014e0a4f8dd6200af83e51ff120420038031:B108",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "start_line": 10,
+ "end_line": 10
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B108",
+ "value": "B108",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "line": 10,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html",
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with Popen module.",
+ "cve": "python/imports/imports-aliases.py:2c3e1fa1e54c3c6646e8bcfaee2518153c6799b77587ff8d9a7b0631f6d34785:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 1,
+ "end_line": 1
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-aliases.py",
+ "line": 1,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with pickle module.",
+ "cve": "python/imports/imports.py:af58d07f6ad519ef5287fcae65bf1a6999448a1a3a8bc1ac2a11daa80d0b96bf:B403",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports.py",
+ "start_line": 2,
+ "end_line": 2
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B403",
+ "value": "B403"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports.py",
+ "line": 2,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with subprocess module.",
+ "cve": "python/imports/imports.py:8de9bc98029d212db530785a5f6780cfa663548746ff228ab8fa96c5bb82f089:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports.py",
+ "start_line": 4,
+ "end_line": 4
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports.py",
+ "line": 4,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: 'blerg'",
+ "cve": "python/hardcoded/hardcoded-passwords.py:97c30f1d76d2a88913e3ce9ae74087874d740f87de8af697a9c455f01119f633:B106",
+ "severity": "Low",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 22,
+ "end_line": 22
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B106",
+ "value": "B106",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b106_hardcoded_password_funcarg.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 22,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b106_hardcoded_password_funcarg.html",
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: 'root'",
+ "cve": "python/hardcoded/hardcoded-passwords.py:7431c73a0bc16d94ece2a2e75ef38f302574d42c37ac0c3c38ad0b3bf8a59f10:B105",
+ "severity": "Low",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 5,
+ "end_line": 5
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B105",
+ "value": "B105",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 5,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: ''",
+ "cve": "python/hardcoded/hardcoded-passwords.py:d2d1857c27caedd49c57bfbcdc23afcc92bd66a22701fcdc632869aab4ca73ee:B105",
+ "severity": "Low",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 9,
+ "end_line": 9
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B105",
+ "value": "B105",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 9,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: 'ajklawejrkl42348swfgkg'",
+ "cve": "python/hardcoded/hardcoded-passwords.py:fb3866215a61393a5c9c32a3b60e2058171a23219c353f722cbd3567acab21d2:B105",
+ "severity": "Low",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 13,
+ "end_line": 13
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B105",
+ "value": "B105",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 13,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: 'blerg'",
+ "cve": "python/hardcoded/hardcoded-passwords.py:63c62a8b7e1e5224439bd26b28030585ac48741e28ca64561a6071080c560a5f:B105",
+ "severity": "Low",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 23,
+ "end_line": 23
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B105",
+ "value": "B105",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 23,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: 'blerg'",
+ "cve": "python/hardcoded/hardcoded-passwords.py:4311b06d08df8fa58229b341c531da8e1a31ec4520597bdff920cd5c098d86f9:B105",
+ "severity": "Low",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 24,
+ "end_line": 24
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B105",
+ "value": "B105",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 24,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with subprocess module.",
+ "cve": "python/imports/imports-function.py:5858400c2f39047787702de44d03361ef8d954c9d14bd54ee1c2bef9e6a7df93:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-function.py",
+ "start_line": 4,
+ "end_line": 4
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-function.py",
+ "line": 4,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with pickle module.",
+ "cve": "python/imports/imports-function.py:dbda3cf4190279d30e0aad7dd137eca11272b0b225e8af4e8bf39682da67d956:B403",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-function.py",
+ "start_line": 2,
+ "end_line": 2
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B403",
+ "value": "B403"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-function.py",
+ "line": 2,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with Popen module.",
+ "cve": "python/imports/imports-from.py:eb8a0db9cd1a8c1ab39a77e6025021b1261cc2a0b026b2f4a11fca4e0636d8dd:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-from.py",
+ "start_line": 7,
+ "end_line": 7
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-from.py",
+ "line": 7,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "subprocess call with shell=True seems safe, but may be changed in the future, consider rewriting without shell",
+ "cve": "python/imports/imports-aliases.py:f99f9721e27537fbcb6699a4cf39c6740d6234d2c6f06cfc2d9ea977313c483d:B602",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 9,
+ "end_line": 9
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B602",
+ "value": "B602",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-aliases.py",
+ "line": 9,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html",
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with subprocess module.",
+ "cve": "python/imports/imports-from.py:332a12ab1146698f614a905ce6a6a5401497a12281aef200e80522711c69dcf4:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-from.py",
+ "start_line": 6,
+ "end_line": 6
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-from.py",
+ "line": 6,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with Popen module.",
+ "cve": "python/imports/imports-from.py:0a48de4a3d5348853a03666cb574697e3982998355e7a095a798bd02a5947276:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-from.py",
+ "start_line": 1,
+ "end_line": 2
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-from.py",
+ "line": 1,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with pickle module.",
+ "cve": "python/imports/imports-aliases.py:51b71661dff994bde3529639a727a678c8f5c4c96f00d300913f6d5be1bbdf26:B403",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 7,
+ "end_line": 8
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B403",
+ "value": "B403"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-aliases.py",
+ "line": 7,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with loads module.",
+ "cve": "python/imports/imports-aliases.py:6ff02aeb3149c01ab68484d794a94f58d5d3e3bb0d58557ef4153644ea68ea54:B403",
+ "severity": "Low",
+ "confidence": "High",
+ "scanner": {
+ "id": "bandit",
+ "name": "Bandit"
+ },
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 6,
+ "end_line": 6
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B403",
+ "value": "B403"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-aliases.py",
+ "line": 6,
+ "tool": "bandit"
+ },
+ {
+ "category": "sast",
+ "message": "Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues (CWE-119!/CWE-120)",
+ "cve": "c/subdir/utils.c:b466873101951fe96e1332f6728eb7010acbbd5dfc3b65d7d53571d091a06d9e:CWE-119!/CWE-120",
+ "confidence": "Low",
+ "solution": "Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length",
+ "scanner": {
+ "id": "flawfinder",
+ "name": "Flawfinder"
+ },
+ "location": {
+ "file": "c/subdir/utils.c",
+ "start_line": 4
+ },
+ "identifiers": [
+ {
+ "type": "flawfinder_func_name",
+ "name": "Flawfinder - char",
+ "value": "char"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-119",
+ "value": "119",
+ "url": "https://cwe.mitre.org/data/definitions/119.html"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-120",
+ "value": "120",
+ "url": "https://cwe.mitre.org/data/definitions/120.html"
+ }
+ ],
+ "file": "c/subdir/utils.c",
+ "line": 4,
+ "url": "https://cwe.mitre.org/data/definitions/119.html",
+ "tool": "flawfinder"
+ },
+ {
+ "category": "sast",
+ "message": "Check when opening files - can an attacker redirect it (via symlinks), force the opening of special file type (e.g., device files), move things around to create a race condition, control its ancestors, or change its contents? (CWE-362)",
+ "cve": "c/subdir/utils.c:bab681140fcc8fc3085b6bba74081b44ea145c1c98b5e70cf19ace2417d30770:CWE-362",
+ "confidence": "Low",
+ "scanner": {
+ "id": "flawfinder",
+ "name": "Flawfinder"
+ },
+ "location": {
+ "file": "c/subdir/utils.c",
+ "start_line": 8
+ },
+ "identifiers": [
+ {
+ "type": "flawfinder_func_name",
+ "name": "Flawfinder - fopen",
+ "value": "fopen"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-362",
+ "value": "362",
+ "url": "https://cwe.mitre.org/data/definitions/362.html"
+ }
+ ],
+ "file": "c/subdir/utils.c",
+ "line": 8,
+ "url": "https://cwe.mitre.org/data/definitions/362.html",
+ "tool": "flawfinder"
+ },
+ {
+ "category": "sast",
+ "message": "Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues (CWE-119!/CWE-120)",
+ "cve": "cplusplus/src/hello.cpp:c8c6dd0afdae6814194cf0930b719f757ab7b379cf8f261e7f4f9f2f323a818a:CWE-119!/CWE-120",
+ "confidence": "Low",
+ "solution": "Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length",
+ "scanner": {
+ "id": "flawfinder",
+ "name": "Flawfinder"
+ },
+ "location": {
+ "file": "cplusplus/src/hello.cpp",
+ "start_line": 6
+ },
+ "identifiers": [
+ {
+ "type": "flawfinder_func_name",
+ "name": "Flawfinder - char",
+ "value": "char"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-119",
+ "value": "119",
+ "url": "https://cwe.mitre.org/data/definitions/119.html"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-120",
+ "value": "120",
+ "url": "https://cwe.mitre.org/data/definitions/120.html"
+ }
+ ],
+ "file": "cplusplus/src/hello.cpp",
+ "line": 6,
+ "url": "https://cwe.mitre.org/data/definitions/119.html",
+ "tool": "flawfinder"
+ },
+ {
+ "category": "sast",
+ "message": "Does not check for buffer overflows when copying to destination [MS-banned] (CWE-120)",
+ "cve": "cplusplus/src/hello.cpp:331c04062c4fe0c7c486f66f59e82ad146ab33cdd76ae757ca41f392d568cbd0:CWE-120",
+ "confidence": "Low",
+ "solution": "Consider using snprintf, strcpy_s, or strlcpy (warning: strncpy easily misused)",
+ "scanner": {
+ "id": "flawfinder",
+ "name": "Flawfinder"
+ },
+ "location": {
+ "file": "cplusplus/src/hello.cpp",
+ "start_line": 7
+ },
+ "identifiers": [
+ {
+ "type": "flawfinder_func_name",
+ "name": "Flawfinder - strcpy",
+ "value": "strcpy"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-120",
+ "value": "120",
+ "url": "https://cwe.mitre.org/data/definitions/120.html"
+ }
+ ],
+ "file": "cplusplus/src/hello.cpp",
+ "line": 7,
+ "url": "https://cwe.mitre.org/data/definitions/120.html",
+ "tool": "flawfinder"
+ }
+]
diff --git a/spec/fixtures/security_reports/feature-branch/gl-sast-report.json b/spec/fixtures/security_reports/feature-branch/gl-sast-report.json
new file mode 100644
index 00000000000..f93233e0ebb
--- /dev/null
+++ b/spec/fixtures/security_reports/feature-branch/gl-sast-report.json
@@ -0,0 +1,177 @@
+{
+ "version": "14.0.0",
+ "vulnerabilities": [
+ {
+ "category": "sast",
+ "name": "Predictable pseudorandom number generator",
+ "message": "Predictable pseudorandom number generator",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:47:PREDICTABLE_RANDOM",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
+ },
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 47,
+ "end_line": 47,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "generateSecretToken2"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-PREDICTABLE_RANDOM",
+ "value": "PREDICTABLE_RANDOM",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
+ }
+ ]
+ },
+ {
+ "category": "sast",
+ "name": "Predictable pseudorandom number generator",
+ "message": "Predictable pseudorandom number generator",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:41:PREDICTABLE_RANDOM",
+ "severity": "Low",
+ "confidence": "Low",
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
+ },
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 41,
+ "end_line": 41,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "generateSecretToken1"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-PREDICTABLE_RANDOM",
+ "value": "PREDICTABLE_RANDOM",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
+ }
+ ]
+ },
+ {
+ "category": "sast",
+ "name": "ECB mode is insecure",
+ "message": "ECB mode is insecure",
+ "description": "The cipher uses ECB mode, which provides poor confidentiality for encrypted data",
+ "cve": "ea0f905fc76f2739d5f10a1fd1e37a10:ECB_MODE:java-maven/src/main/java/com/gitlab/security_products/tests/App.java:29",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
+ },
+ "location": {
+ "file": "java-maven/src/main/java/com/gitlab/security_products/tests/App.java",
+ "start_line": 29,
+ "end_line": 29,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "insecureCypher"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-ECB_MODE",
+ "value": "ECB_MODE",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-327",
+ "value": "327",
+ "url": "https://cwe.mitre.org/data/definitions/327.html"
+ }
+ ]
+ },
+ {
+ "category": "sast",
+ "name": "Hard coded key",
+ "message": "Hard coded key",
+ "description": "Hard coded cryptographic key found",
+ "cve": "102ac67e0975ecec02a056008e0faad8:HARD_CODE_KEY:scala-sbt/src/main/scala/example/Main.scala:12",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
+ },
+ "location": {
+ "file": "scala-sbt/src/main/scala/example/Main.scala",
+ "start_line": 12,
+ "end_line": 12,
+ "class": "example.Main$",
+ "method": "getBytes"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-HARD_CODE_KEY",
+ "value": "HARD_CODE_KEY",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#HARD_CODE_KEY"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-321",
+ "value": "321",
+ "url": "https://cwe.mitre.org/data/definitions/321.html"
+ }
+ ]
+ },
+ {
+ "category": "sast",
+ "name": "ECB mode is insecure",
+ "message": "ECB mode is insecure",
+ "description": "The cipher uses ECB mode, which provides poor confidentiality for encrypted data",
+ "cve": "ea0f905fc76f2739d5f10a1fd1e37a10:ECB_MODE:app/src/main/groovy/com/gitlab/security_products/tests/App.groovy:29",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
+ },
+ "location": {
+ "file": "app/src/main/groovy/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 29,
+ "end_line": 29,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "insecureCypher"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-ECB_MODE",
+ "value": "ECB_MODE",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-327",
+ "value": "327",
+ "url": "https://cwe.mitre.org/data/definitions/327.html"
+ }
+ ]
+ }
+ ],
+ "remediations": [],
+ "scan": {
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs",
+ "url": "https://spotbugs.github.io",
+ "vendor": {
+ "name": "GitLab"
+ },
+ "version": "4.0.2"
+ },
+ "type": "sast",
+ "status": "success",
+ "start_time": "placeholder-value",
+ "end_time": "placeholder-value"
+ }
+}
diff --git a/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json b/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json
new file mode 100644
index 00000000000..57a4dee3ddd
--- /dev/null
+++ b/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json
@@ -0,0 +1,5 @@
+{
+ "version": "3.0",
+ "vulnerabilities": [],
+ "remediations": []
+}
diff --git a/spec/fixtures/security_reports/master/gl-common-scanning-report-names.json b/spec/fixtures/security_reports/master/gl-common-scanning-report-names.json
new file mode 100644
index 00000000000..3cfb3e51ef7
--- /dev/null
+++ b/spec/fixtures/security_reports/master/gl-common-scanning-report-names.json
@@ -0,0 +1,168 @@
+{
+ "vulnerabilities": [
+ {
+ "category": "dependency_scanning",
+ "name": "Vulnerabilities in libxml2",
+ "message": "Vulnerabilities in libxml2 in nokogiri",
+ "description": "",
+ "cve": "CVE-1020",
+ "severity": "High",
+ "solution": "Upgrade to latest version.",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "location": {},
+ "identifiers": [],
+ "links": [
+ {
+ "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1020"
+ }
+ ]
+ },
+ {
+ "id": "bb2fbeb1b71ea360ce3f86f001d4e84823c3ffe1a1f7d41ba7466b14cfa953d3",
+ "category": "dependency_scanning",
+ "name": "Regular Expression Denial of Service",
+ "message": "",
+ "description": "",
+ "cve": "CVE-1030",
+ "severity": "Unknown",
+ "solution": "Upgrade to latest versions.",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "location": {},
+ "identifiers": [],
+ "links": [
+ {
+ "name": "CVE-1030",
+ "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1030"
+ }
+ ]
+ },
+ {
+ "category": "dependency_scanning",
+ "name": "",
+ "message": "",
+ "description": "",
+ "cve": "CVE-2017-11429",
+ "severity": "Unknown",
+ "solution": "Upgrade to fixed version.\r\n",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "location": {
+ "file": "yarn/yarn.lock",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
+ "identifiers": [
+ {
+ "value": "2017-11429",
+ "type": "cwe",
+ "name": "CWE-2017-11429",
+ "url": "https://cve.mitre.org/cgi-bin/cwename.cgi?name=CWE-2017-11429"
+ },
+ {
+ "value": "2017-11429",
+ "type": "cve",
+ "name": "CVE-2017-11429",
+ "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-11429"
+ }
+ ],
+ "links": []
+ },
+ {
+ "category": "dependency_scanning",
+ "name": "",
+ "message": "",
+ "description": "",
+ "cve": "CWE-2017-11429",
+ "severity": "Unknown",
+ "solution": "Upgrade to fixed version.\r\n",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "location": {
+ "file": "yarn/yarn.lock",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
+ "identifiers": [
+ {
+ "value": "2017-11429",
+ "type": "cwe",
+ "name": "CwE-2017-11429",
+ "url": "https://cwe.mitre.org/cgi-bin/cwename.cgi?name=CWE-2017-11429"
+ },
+ {
+ "value": "2017-11429",
+ "type": "other",
+ "name": "other-2017-11429",
+ "url": "https://other.mitre.org/cgi-bin/othername.cgi?name=other-2017-11429"
+ }
+ ],
+ "links": []
+ },
+ {
+ "category": "dependency_scanning",
+ "name": "",
+ "message": "",
+ "description": "",
+ "cve": "OTHER-2017-11429",
+ "severity": "Unknown",
+ "solution": "Upgrade to fixed version.\r\n",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "location": {
+ "file": "yarn/yarn.lock",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
+ "identifiers": [
+ {
+ "value": "2017-11429",
+ "type": "other",
+ "name": "other-2017-11429",
+ "url": "https://other.mitre.org/cgi-bin/othername.cgi?name=other-2017-11429"
+ }
+ ],
+ "links": []
+ }
+ ],
+ "remediations": [],
+ "dependency_files": [],
+ "scan": {
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium",
+ "url": "https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven",
+ "vendor": {
+ "name": "GitLab"
+ },
+ "version": "2.18.0"
+ },
+ "type": "dependency_scanning",
+ "start_time": "placeholder-value",
+ "end_time": "placeholder-value",
+ "status": "success"
+ }
+}
diff --git a/spec/fixtures/security_reports/master/gl-common-scanning-report.json b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
new file mode 100644
index 00000000000..cf4c5239b57
--- /dev/null
+++ b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
@@ -0,0 +1,160 @@
+{
+ "vulnerabilities": [
+ {
+ "category": "dependency_scanning",
+ "name": "Vulnerabilities in libxml2",
+ "message": "Vulnerabilities in libxml2 in nokogiri",
+ "description": "",
+ "cve": "CVE-1020",
+ "severity": "High",
+ "solution": "Upgrade to latest version.",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "location": {},
+ "identifiers": [
+ {
+ "type": "GitLab",
+ "name": "Foo vulnerability",
+ "value": "foo"
+ }
+ ],
+ "links": [
+ {
+ "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1020"
+ }
+ ],
+ "details": {
+ "commit": {
+ "name": [
+ {
+ "lang": "en",
+ "value": "The Commit"
+ }
+ ],
+ "description": [
+ {
+ "lang": "en",
+ "value": "Commit where the vulnerability was identified"
+ }
+ ],
+ "type": "commit",
+ "value": "41df7b7eb3be2b5be2c406c2f6d28cd6631eeb19"
+ }
+ }
+ },
+ {
+ "id": "bb2fbeb1b71ea360ce3f86f001d4e84823c3ffe1a1f7d41ba7466b14cfa953d3",
+ "category": "dependency_scanning",
+ "name": "Regular Expression Denial of Service",
+ "message": "Regular Expression Denial of Service in debug",
+ "description": "",
+ "cve": "CVE-1030",
+ "severity": "Unknown",
+ "solution": "Upgrade to latest versions.",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "location": {},
+ "identifiers": [
+ {
+ "type": "GitLab",
+ "name": "Bar vulnerability",
+ "value": "bar"
+ }
+ ],
+ "links": [
+ {
+ "name": "CVE-1030",
+ "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1030"
+ }
+ ]
+ },
+ {
+ "category": "dependency_scanning",
+ "name": "Authentication bypass via incorrect DOM traversal and canonicalization",
+ "message": "Authentication bypass via incorrect DOM traversal and canonicalization in saml2-js",
+ "description": "",
+ "cve": "yarn/yarn.lock:saml2-js:gemnasium:9952e574-7b5b-46fa-a270-aeb694198a98",
+ "severity": "Unknown",
+ "solution": "Upgrade to fixed version.\r\n",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "location": {},
+ "identifiers": [],
+ "links": [
+ ]
+ }
+ ],
+ "remediations": [
+ {
+ "fixes": [
+ {
+ "cve": "CVE-1020"
+ }
+ ],
+ "summary": "",
+ "diff": ""
+ },
+ {
+ "fixes": [
+ {
+ "cve": "CVE",
+ "id": "bb2fbeb1b71ea360ce3f86f001d4e84823c3ffe1a1f7d41ba7466b14cfa953d3"
+ }
+ ],
+ "summary": "",
+ "diff": ""
+ },
+ {
+ "fixes": [
+ {
+ "cve": "CVE",
+ "id": "bb2fbeb1b71ea360ce3f86f001d4e84823c3ffe1a1f7d41ba7466b14cfa953d3"
+ }
+ ],
+ "summary": "",
+ "diff": ""
+ },
+ {
+ "fixes": [
+ {
+ "id": "2134",
+ "cve": "CVE-1"
+ }
+ ],
+ "summary": "",
+ "diff": ""
+ }
+ ],
+ "dependency_files": [],
+ "scan": {
+ "analyzer": {
+ "id": "common-analyzer",
+ "name": "Common Analyzer",
+ "url": "https://site.com/analyzer/common",
+ "version": "2.0.1",
+ "vendor": {
+ "name": "Common"
+ }
+ },
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium",
+ "url": "https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven",
+ "vendor": {
+ "name": "GitLab"
+ },
+ "version": "2.18.0"
+ },
+ "type": "dependency_scanning",
+ "start_time": "placeholder-value",
+ "end_time": "placeholder-value",
+ "status": "success"
+ },
+ "version": "14.0.2"
+}
diff --git a/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json b/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json
new file mode 100644
index 00000000000..f65580145b4
--- /dev/null
+++ b/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json
@@ -0,0 +1,802 @@
+{
+ "version": "1.2",
+ "vulnerabilities": [
+ {
+ "category": "sast",
+ "message": "Probable insecure usage of temp file/directory.",
+ "cve": "python/hardcoded/hardcoded-tmp.py:52865813c884a507be1f152d654245af34aba8a391626d01f1ab6d3f52ec8779:B108",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "location": {
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "start_line": 1,
+ "end_line": 1
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B108",
+ "value": "B108",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "line": 1,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
+ },
+ {
+ "category": "sast",
+ "name": "Predictable pseudorandom number generator",
+ "message": "Predictable pseudorandom number generator",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:47:PREDICTABLE_RANDOM",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 47,
+ "end_line": 47,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "generateSecretToken2"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-PREDICTABLE_RANDOM",
+ "value": "PREDICTABLE_RANDOM",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
+ }
+ ],
+ "priority": "Medium",
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "line": 47,
+ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
+ },
+ {
+ "category": "sast",
+ "name": "Predictable pseudorandom number generator",
+ "message": "Predictable pseudorandom number generator",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:41:PREDICTABLE_RANDOM",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 41,
+ "end_line": 41,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "generateSecretToken1"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-PREDICTABLE_RANDOM",
+ "value": "PREDICTABLE_RANDOM",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
+ }
+ ],
+ "priority": "Medium",
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "line": 41,
+ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
+ },
+ {
+ "category": "sast",
+ "message": "Use of insecure MD2, MD4, or MD5 hash function.",
+ "cve": "python/imports/imports-aliases.py:cb203b465dffb0cb3a8e8bd8910b84b93b0a5995a938e4b903dbb0cd6ffa1254:B303",
+ "severity": "Medium",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 11,
+ "end_line": 11
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B303",
+ "value": "B303"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/imports/imports-aliases.py",
+ "line": 11
+ },
+ {
+ "category": "sast",
+ "message": "Use of insecure MD2, MD4, or MD5 hash function.",
+ "cve": "python/imports/imports-aliases.py:a7173c43ae66bd07466632d819d450e0071e02dbf782763640d1092981f9631b:B303",
+ "severity": "Medium",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 12,
+ "end_line": 12
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B303",
+ "value": "B303"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/imports/imports-aliases.py",
+ "line": 12
+ },
+ {
+ "category": "sast",
+ "message": "Use of insecure MD2, MD4, or MD5 hash function.",
+ "cve": "python/imports/imports-aliases.py:017017b77deb0b8369b6065947833eeea752a92ec8a700db590fece3e934cf0d:B303",
+ "severity": "Medium",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 13,
+ "end_line": 13
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B303",
+ "value": "B303"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/imports/imports-aliases.py",
+ "line": 13
+ },
+ {
+ "category": "sast",
+ "message": "Use of insecure MD2, MD4, or MD5 hash function.",
+ "cve": "python/imports/imports-aliases.py:45fc8c53aea7b84f06bc4e590cc667678d6073c4c8a1d471177ca2146fb22db2:B303",
+ "severity": "Medium",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 14,
+ "end_line": 14
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B303",
+ "value": "B303"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/imports/imports-aliases.py",
+ "line": 14
+ },
+ {
+ "category": "sast",
+ "message": "Pickle library appears to be in use, possible security issue.",
+ "cve": "python/imports/imports-aliases.py:5f200d47291e7bbd8352db23019b85453ca048dd98ea0c291260fa7d009963a4:B301",
+ "severity": "Medium",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 15,
+ "end_line": 15
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B301",
+ "value": "B301"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/imports/imports-aliases.py",
+ "line": 15
+ },
+ {
+ "category": "sast",
+ "name": "ECB mode is insecure",
+ "message": "ECB mode is insecure",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:29:ECB_MODE",
+ "severity": "Medium",
+ "confidence": "High",
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 29,
+ "end_line": 29,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "insecureCypher"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-ECB_MODE",
+ "value": "ECB_MODE",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE"
+ }
+ ],
+ "priority": "Medium",
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "line": 29,
+ "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE"
+ },
+ {
+ "category": "sast",
+ "name": "Cipher with no integrity",
+ "message": "Cipher with no integrity",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:29:CIPHER_INTEGRITY",
+ "severity": "Medium",
+ "confidence": "High",
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 29,
+ "end_line": 29,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "insecureCypher"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-CIPHER_INTEGRITY",
+ "value": "CIPHER_INTEGRITY",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#CIPHER_INTEGRITY"
+ }
+ ],
+ "priority": "Medium",
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "line": 29,
+ "url": "https://find-sec-bugs.github.io/bugs.htm#CIPHER_INTEGRITY"
+ },
+ {
+ "category": "sast",
+ "message": "Probable insecure usage of temp file/directory.",
+ "cve": "python/hardcoded/hardcoded-tmp.py:63dd4d626855555b816985d82c4614a790462a0a3ada89dc58eb97f9c50f3077:B108",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "location": {
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "start_line": 14,
+ "end_line": 14
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B108",
+ "value": "B108",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "line": 14,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
+ },
+ {
+ "category": "sast",
+ "message": "Probable insecure usage of temp file/directory.",
+ "cve": "python/hardcoded/hardcoded-tmp.py:4ad6d4c40a8c263fc265f3384724014e0a4f8dd6200af83e51ff120420038031:B108",
+ "severity": "Medium",
+ "confidence": "Medium",
+ "location": {
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "start_line": 10,
+ "end_line": 10
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B108",
+ "value": "B108",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
+ }
+ ],
+ "priority": "Medium",
+ "file": "python/hardcoded/hardcoded-tmp.py",
+ "line": 10,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with Popen module.",
+ "cve": "python/imports/imports-aliases.py:2c3e1fa1e54c3c6646e8bcfaee2518153c6799b77587ff8d9a7b0631f6d34785:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 1,
+ "end_line": 1
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-aliases.py",
+ "line": 1
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with pickle module.",
+ "cve": "python/imports/imports.py:af58d07f6ad519ef5287fcae65bf1a6999448a1a3a8bc1ac2a11daa80d0b96bf:B403",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports.py",
+ "start_line": 2,
+ "end_line": 2
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B403",
+ "value": "B403"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports.py",
+ "line": 2
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with subprocess module.",
+ "cve": "python/imports/imports.py:8de9bc98029d212db530785a5f6780cfa663548746ff228ab8fa96c5bb82f089:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports.py",
+ "start_line": 4,
+ "end_line": 4
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports.py",
+ "line": 4
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: 'blerg'",
+ "cve": "python/hardcoded/hardcoded-passwords.py:97c30f1d76d2a88913e3ce9ae74087874d740f87de8af697a9c455f01119f633:B106",
+ "severity": "Low",
+ "confidence": "Medium",
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 22,
+ "end_line": 22
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B106",
+ "value": "B106",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b106_hardcoded_password_funcarg.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 22,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b106_hardcoded_password_funcarg.html"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: 'root'",
+ "cve": "python/hardcoded/hardcoded-passwords.py:7431c73a0bc16d94ece2a2e75ef38f302574d42c37ac0c3c38ad0b3bf8a59f10:B105",
+ "severity": "Low",
+ "confidence": "Medium",
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 5,
+ "end_line": 5
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B105",
+ "value": "B105",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 5,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: ''",
+ "cve": "python/hardcoded/hardcoded-passwords.py:d2d1857c27caedd49c57bfbcdc23afcc92bd66a22701fcdc632869aab4ca73ee:B105",
+ "severity": "Low",
+ "confidence": "Medium",
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 9,
+ "end_line": 9
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B105",
+ "value": "B105",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 9,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: 'ajklawejrkl42348swfgkg'",
+ "cve": "python/hardcoded/hardcoded-passwords.py:fb3866215a61393a5c9c32a3b60e2058171a23219c353f722cbd3567acab21d2:B105",
+ "severity": "Low",
+ "confidence": "Medium",
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 13,
+ "end_line": 13
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B105",
+ "value": "B105",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 13,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: 'blerg'",
+ "cve": "python/hardcoded/hardcoded-passwords.py:63c62a8b7e1e5224439bd26b28030585ac48741e28ca64561a6071080c560a5f:B105",
+ "severity": "Low",
+ "confidence": "Medium",
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 23,
+ "end_line": 23
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B105",
+ "value": "B105",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 23,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ },
+ {
+ "category": "sast",
+ "message": "Possible hardcoded password: 'blerg'",
+ "cve": "python/hardcoded/hardcoded-passwords.py:4311b06d08df8fa58229b341c531da8e1a31ec4520597bdff920cd5c098d86f9:B105",
+ "severity": "Low",
+ "confidence": "Medium",
+ "location": {
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "start_line": 24,
+ "end_line": 24
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B105",
+ "value": "B105",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/hardcoded/hardcoded-passwords.py",
+ "line": 24,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with subprocess module.",
+ "cve": "python/imports/imports-function.py:5858400c2f39047787702de44d03361ef8d954c9d14bd54ee1c2bef9e6a7df93:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-function.py",
+ "start_line": 4,
+ "end_line": 4
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-function.py",
+ "line": 4
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with pickle module.",
+ "cve": "python/imports/imports-function.py:dbda3cf4190279d30e0aad7dd137eca11272b0b225e8af4e8bf39682da67d956:B403",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-function.py",
+ "start_line": 2,
+ "end_line": 2
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B403",
+ "value": "B403"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-function.py",
+ "line": 2
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with Popen module.",
+ "cve": "python/imports/imports-from.py:eb8a0db9cd1a8c1ab39a77e6025021b1261cc2a0b026b2f4a11fca4e0636d8dd:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-from.py",
+ "start_line": 7,
+ "end_line": 7
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-from.py",
+ "line": 7
+ },
+ {
+ "category": "sast",
+ "message": "subprocess call with shell=True seems safe, but may be changed in the future, consider rewriting without shell",
+ "cve": "python/imports/imports-aliases.py:f99f9721e27537fbcb6699a4cf39c6740d6234d2c6f06cfc2d9ea977313c483d:B602",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 9,
+ "end_line": 9
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B602",
+ "value": "B602",
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-aliases.py",
+ "line": 9,
+ "url": "https://docs.openstack.org/bandit/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html"
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with subprocess module.",
+ "cve": "python/imports/imports-from.py:332a12ab1146698f614a905ce6a6a5401497a12281aef200e80522711c69dcf4:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-from.py",
+ "start_line": 6,
+ "end_line": 6
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-from.py",
+ "line": 6
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with Popen module.",
+ "cve": "python/imports/imports-from.py:0a48de4a3d5348853a03666cb574697e3982998355e7a095a798bd02a5947276:B404",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-from.py",
+ "start_line": 1,
+ "end_line": 2
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B404",
+ "value": "B404"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-from.py",
+ "line": 1
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with pickle module.",
+ "cve": "python/imports/imports-aliases.py:51b71661dff994bde3529639a727a678c8f5c4c96f00d300913f6d5be1bbdf26:B403",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 7,
+ "end_line": 8
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B403",
+ "value": "B403"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-aliases.py",
+ "line": 7
+ },
+ {
+ "category": "sast",
+ "message": "Consider possible security implications associated with loads module.",
+ "cve": "python/imports/imports-aliases.py:6ff02aeb3149c01ab68484d794a94f58d5d3e3bb0d58557ef4153644ea68ea54:B403",
+ "severity": "Low",
+ "confidence": "High",
+ "location": {
+ "file": "python/imports/imports-aliases.py",
+ "start_line": 6,
+ "end_line": 6
+ },
+ "identifiers": [
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B403",
+ "value": "B403"
+ }
+ ],
+ "priority": "Low",
+ "file": "python/imports/imports-aliases.py",
+ "line": 6
+ },
+ {
+ "category": "sast",
+ "message": "Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues (CWE-119!/CWE-120)",
+ "cve": "c/subdir/utils.c:b466873101951fe96e1332f6728eb7010acbbd5dfc3b65d7d53571d091a06d9e:CWE-119!/CWE-120",
+ "confidence": "Low",
+ "solution": "Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length",
+ "location": {
+ "file": "c/subdir/utils.c",
+ "start_line": 4
+ },
+ "identifiers": [
+ {
+ "type": "flawfinder_func_name",
+ "name": "Flawfinder - char",
+ "value": "char"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-119",
+ "value": "119",
+ "url": "https://cwe.mitre.org/data/definitions/119.html"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-120",
+ "value": "120",
+ "url": "https://cwe.mitre.org/data/definitions/120.html"
+ }
+ ],
+ "file": "c/subdir/utils.c",
+ "line": 4,
+ "url": "https://cwe.mitre.org/data/definitions/119.html"
+ },
+ {
+ "category": "sast",
+ "message": "Check when opening files - can an attacker redirect it (via symlinks), force the opening of special file type (e.g., device files), move things around to create a race condition, control its ancestors, or change its contents? (CWE-362)",
+ "cve": "c/subdir/utils.c:bab681140fcc8fc3085b6bba74081b44ea145c1c98b5e70cf19ace2417d30770:CWE-362",
+ "confidence": "Low",
+ "location": {
+ "file": "c/subdir/utils.c",
+ "start_line": 8
+ },
+ "identifiers": [
+ {
+ "type": "flawfinder_func_name",
+ "name": "Flawfinder - fopen",
+ "value": "fopen"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-362",
+ "value": "362",
+ "url": "https://cwe.mitre.org/data/definitions/362.html"
+ }
+ ],
+ "file": "c/subdir/utils.c",
+ "line": 8,
+ "url": "https://cwe.mitre.org/data/definitions/362.html"
+ },
+ {
+ "category": "sast",
+ "message": "Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues (CWE-119!/CWE-120)",
+ "cve": "cplusplus/src/hello.cpp:c8c6dd0afdae6814194cf0930b719f757ab7b379cf8f261e7f4f9f2f323a818a:CWE-119!/CWE-120",
+ "confidence": "Low",
+ "solution": "Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length",
+ "location": {
+ "file": "cplusplus/src/hello.cpp",
+ "start_line": 6
+ },
+ "identifiers": [
+ {
+ "type": "flawfinder_func_name",
+ "name": "Flawfinder - char",
+ "value": "char"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-119",
+ "value": "119",
+ "url": "https://cwe.mitre.org/data/definitions/119.html"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-120",
+ "value": "120",
+ "url": "https://cwe.mitre.org/data/definitions/120.html"
+ }
+ ],
+ "file": "cplusplus/src/hello.cpp",
+ "line": 6,
+ "url": "https://cwe.mitre.org/data/definitions/119.html"
+ },
+ {
+ "category": "sast",
+ "message": "Does not check for buffer overflows when copying to destination [MS-banned] (CWE-120)",
+ "cve": "cplusplus/src/hello.cpp:331c04062c4fe0c7c486f66f59e82ad146ab33cdd76ae757ca41f392d568cbd0:CWE-120",
+ "confidence": "Low",
+ "solution": "Consider using snprintf, strcpy_s, or strlcpy (warning: strncpy easily misused)",
+ "location": {
+ "file": "cplusplus/src/hello.cpp",
+ "start_line": 7
+ },
+ "identifiers": [
+ {
+ "type": "flawfinder_func_name",
+ "name": "Flawfinder - strcpy",
+ "value": "strcpy"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-120",
+ "value": "120",
+ "url": "https://cwe.mitre.org/data/definitions/120.html"
+ }
+ ],
+ "file": "cplusplus/src/hello.cpp",
+ "line": 7,
+ "url": "https://cwe.mitre.org/data/definitions/120.html"
+ }
+ ]
+}
diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace
index ebd2853e558..ad01c113062 100644
--- a/spec/fixtures/trace/sample_trace
+++ b/spec/fixtures/trace/sample_trace
@@ -2592,7 +2592,7 @@ TeamcityService
should not validate that :username cannot be empty/falsy
should not validate that :password cannot be empty/falsy
Callbacks
- before_update :reset_password
+ before_validation :reset_password
saves password if new url is set together with password when no password was previously set
when a password was previously set
resets password if url changed
diff --git a/spec/frontend/__helpers__/mock_dom_observer.js b/spec/frontend/__helpers__/mock_dom_observer.js
index 1b93b81535d..dd26b594ad9 100644
--- a/spec/frontend/__helpers__/mock_dom_observer.js
+++ b/spec/frontend/__helpers__/mock_dom_observer.js
@@ -52,7 +52,7 @@ class MockIntersectionObserver extends MockObserver {
* const { trigger: triggerMutate } = useMockMutationObserver();
*
* it('test', () => {
- * trigger(el, { options: { childList: true }, entry: { } });
+ * triggerMutate(el, { options: { childList: true }, entry: { } });
* });
* })
* ```
@@ -60,33 +60,31 @@ class MockIntersectionObserver extends MockObserver {
* @param {String} key
*/
const useMockObserver = (key, createMock) => {
- let mockObserver;
+ let mockObservers = [];
let origObserver;
beforeEach(() => {
origObserver = global[key];
global[key] = jest.fn().mockImplementation((...args) => {
- mockObserver = createMock(...args);
+ const mockObserver = createMock(...args);
+ mockObservers.push(mockObserver);
return mockObserver;
});
});
afterEach(() => {
- mockObserver = null;
+ mockObservers.forEach((x) => x.disconnect());
+ mockObservers = [];
global[key] = origObserver;
});
const trigger = (...args) => {
- if (!mockObserver) {
- return;
- }
-
- mockObserver.$_triggerObserve(...args);
+ mockObservers.forEach((observer) => {
+ observer.$_triggerObserve(...args);
+ });
};
- const observersCount = () => mockObserver.$_observers.length;
-
- return { trigger, observersCount };
+ return { trigger };
};
export const useMockIntersectionObserver = () =>
diff --git a/spec/frontend/__helpers__/mock_window_location_helper.js b/spec/frontend/__helpers__/mock_window_location_helper.js
index 08a28fbbbd6..3755778e5c1 100644
--- a/spec/frontend/__helpers__/mock_window_location_helper.js
+++ b/spec/frontend/__helpers__/mock_window_location_helper.js
@@ -10,7 +10,7 @@
*/
const useMockLocation = (fn) => {
const origWindowLocation = window.location;
- let currentWindowLocation;
+ let currentWindowLocation = origWindowLocation;
Object.defineProperty(window, 'location', {
get: () => currentWindowLocation,
diff --git a/spec/frontend/__helpers__/set_window_location_helper.js b/spec/frontend/__helpers__/set_window_location_helper.js
index a94e73762c9..573a089f111 100644
--- a/spec/frontend/__helpers__/set_window_location_helper.js
+++ b/spec/frontend/__helpers__/set_window_location_helper.js
@@ -1,40 +1,53 @@
/**
- * setWindowLocation allows for setting `window.location`
- * (doing so directly is causing an error in jsdom)
+ * setWindowLocation allows for setting `window.location` within Jest.
*
- * Example usage:
- * assert(window.location.hash === undefined);
- * setWindowLocation('http://example.com#foo')
- * assert(window.location.hash === '#foo');
+ * The jsdom environment at the time of writing does not support changing the
+ * current location (see
+ * https://github.com/jsdom/jsdom/blob/16.4.0/lib/jsdom/living/window/navigation.js#L76),
+ * hence this helper.
*
- * More information:
- * https://github.com/facebook/jest/issues/890
+ * This helper mutates the current `window.location` very similarly to how
+ * a direct assignment to `window.location.href` would in a browser (but
+ * without the navigation/reload behaviour). For instance:
*
- * @param url
+ * - Set the full href by passing an absolute URL, e.g.:
+ *
+ * setWindowLocation('https://gdk.test');
+ * // window.location.href is now 'https://gdk.test'
+ *
+ * - Set the path, search and/or hash components by passing a relative URL:
+ *
+ * setWindowLocation('/foo/bar');
+ * // window.location.href is now 'http://test.host/foo/bar'
+ *
+ * setWindowLocation('?foo=bar');
+ * // window.location.href is now 'http://test.host/?foo=bar'
+ *
+ * setWindowLocation('#foo');
+ * // window.location.href is now 'http://test.host/#foo'
+ *
+ * setWindowLocation('/a/b/foo.html?bar=1#qux');
+ * // window.location.href is now 'http://test.host/a/b/foo.html?bar=1#qux'
+ *
+ * Both approaches also automatically update the rest of the properties on
+ * `window.location`. For instance:
+ *
+ * setWindowLocation('http://test.host/a/b/foo.html?bar=1#qux');
+ * // window.location.origin is now 'http://test.host'
+ * // window.location.pathname is now '/a/b/foo.html'
+ * // window.location.search is now '?bar=1'
+ * // window.location.searchParams is now { bar: 1 }
+ * // window.location.hash is now '#qux'
+ *
+ * @param {string} url A string representing an absolute or relative URL.
+ * @returns {undefined}
*/
export default function setWindowLocation(url) {
- const parsedUrl = new URL(url);
+ if (typeof url !== 'string') {
+ throw new TypeError(`Expected string; got ${url} (${typeof url})`);
+ }
- const newLocationValue = [
- 'hash',
- 'host',
- 'hostname',
- 'href',
- 'origin',
- 'pathname',
- 'port',
- 'protocol',
- 'search',
- ].reduce(
- (location, prop) => ({
- ...location,
- [prop]: parsedUrl[prop],
- }),
- {},
- );
+ const newUrl = new URL(url, window.location.href);
- Object.defineProperty(window, 'location', {
- value: newLocationValue,
- writable: true,
- });
+ global.jsdom.reconfigure({ url: newUrl.href });
}
diff --git a/spec/frontend/__helpers__/set_window_location_helper_spec.js b/spec/frontend/__helpers__/set_window_location_helper_spec.js
index 98f26854822..c0f3debddbc 100644
--- a/spec/frontend/__helpers__/set_window_location_helper_spec.js
+++ b/spec/frontend/__helpers__/set_window_location_helper_spec.js
@@ -1,40 +1,133 @@
import setWindowLocation from './set_window_location_helper';
+import { TEST_HOST } from './test_constants';
-describe('setWindowLocation', () => {
- const originalLocation = window.location;
+describe('helpers/set_window_location_helper', () => {
+ const originalLocation = window.location.href;
- afterEach(() => {
- window.location = originalLocation;
+ beforeEach(() => {
+ setWindowLocation(originalLocation);
});
- it.each`
- url | property | value
- ${'https://gitlab.com#foo'} | ${'hash'} | ${'#foo'}
- ${'http://gitlab.com'} | ${'host'} | ${'gitlab.com'}
- ${'http://gitlab.org'} | ${'hostname'} | ${'gitlab.org'}
- ${'http://gitlab.org/foo#bar'} | ${'href'} | ${'http://gitlab.org/foo#bar'}
- ${'http://gitlab.com'} | ${'origin'} | ${'http://gitlab.com'}
- ${'http://gitlab.com/foo/bar/baz'} | ${'pathname'} | ${'/foo/bar/baz'}
- ${'https://gitlab.com'} | ${'protocol'} | ${'https:'}
- ${'http://gitlab.com#foo'} | ${'protocol'} | ${'http:'}
- ${'http://gitlab.com:8080'} | ${'port'} | ${'8080'}
- ${'http://gitlab.com?foo=bar&bar=foo'} | ${'search'} | ${'?foo=bar&bar=foo'}
- `(
- 'sets "window.location.$property" to be "$value" when called with: "$url"',
- ({ url, property, value }) => {
- expect(window.location).toBe(originalLocation);
-
- setWindowLocation(url);
-
- expect(window.location[property]).toBe(value);
- },
- );
-
- it.each([null, 1, undefined, false, '', 'gitlab.com'])(
- 'throws an error when called with an invalid url: "%s"',
- (invalidUrl) => {
- expect(() => setWindowLocation(invalidUrl)).toThrow(/Invalid URL/);
- expect(window.location).toBe(originalLocation);
- },
- );
+ describe('setWindowLocation', () => {
+ describe('given a complete URL', () => {
+ it.each`
+ url | property | value
+ ${'https://gitlab.com#foo'} | ${'hash'} | ${'#foo'}
+ ${'http://gitlab.com'} | ${'host'} | ${'gitlab.com'}
+ ${'http://gitlab.org'} | ${'hostname'} | ${'gitlab.org'}
+ ${'http://gitlab.org/foo#bar'} | ${'href'} | ${'http://gitlab.org/foo#bar'}
+ ${'http://gitlab.com'} | ${'origin'} | ${'http://gitlab.com'}
+ ${'http://gitlab.com/foo/bar/baz'} | ${'pathname'} | ${'/foo/bar/baz'}
+ ${'https://gitlab.com'} | ${'protocol'} | ${'https:'}
+ ${'ftp://gitlab.com#foo'} | ${'protocol'} | ${'ftp:'}
+ ${'http://gitlab.com:8080'} | ${'port'} | ${'8080'}
+ ${'http://gitlab.com?foo=bar&bar=foo'} | ${'search'} | ${'?foo=bar&bar=foo'}
+ `(
+ 'sets "window.location.$property" to be "$value" when called with: "$url"',
+ ({ url, property, value }) => {
+ expect(window.location.href).toBe(originalLocation);
+
+ setWindowLocation(url);
+
+ expect(window.location[property]).toBe(value);
+ },
+ );
+ });
+
+ describe('given a partial URL', () => {
+ it.each`
+ partialURL | href
+ ${'//foo.test:3000/'} | ${'http://foo.test:3000/'}
+ ${'/foo/bar'} | ${`${originalLocation}foo/bar`}
+ ${'foo/bar'} | ${`${originalLocation}foo/bar`}
+ ${'?foo=bar'} | ${`${originalLocation}?foo=bar`}
+ ${'#a-thing'} | ${`${originalLocation}#a-thing`}
+ `('$partialURL sets location.href to $href', ({ partialURL, href }) => {
+ expect(window.location.href).toBe(originalLocation);
+
+ setWindowLocation(partialURL);
+
+ expect(window.location.href).toBe(href);
+ });
+ });
+
+ describe('relative path', () => {
+ describe.each`
+ initialHref | path | newHref
+ ${'https://gdk.test/foo/bar'} | ${'/qux'} | ${'https://gdk.test/qux'}
+ ${'https://gdk.test/foo/bar/'} | ${'/qux'} | ${'https://gdk.test/qux'}
+ ${'https://gdk.test/foo/bar'} | ${'qux'} | ${'https://gdk.test/foo/qux'}
+ ${'https://gdk.test/foo/bar/'} | ${'qux'} | ${'https://gdk.test/foo/bar/qux'}
+ ${'https://gdk.test/foo/bar'} | ${'../qux'} | ${'https://gdk.test/qux'}
+ ${'https://gdk.test/foo/bar/'} | ${'../qux'} | ${'https://gdk.test/foo/qux'}
+ `('when location is $initialHref', ({ initialHref, path, newHref }) => {
+ beforeEach(() => {
+ setWindowLocation(initialHref);
+ });
+
+ it(`${path} sets window.location.href to ${newHref}`, () => {
+ expect(window.location.href).toBe(initialHref);
+
+ setWindowLocation(path);
+
+ expect(window.location.href).toBe(newHref);
+ });
+ });
+ });
+
+ it.each([null, 1, undefined, false, 'https://', 'https:', { foo: 1 }, []])(
+ 'throws an error when called with an invalid url: "%s"',
+ (invalidUrl) => {
+ expect(() => setWindowLocation(invalidUrl)).toThrow();
+ expect(window.location.href).toBe(originalLocation);
+ },
+ );
+
+ describe('affects links', () => {
+ it.each`
+ url | hrefAttr | expectedHref
+ ${'http://gitlab.com/'} | ${'foo'} | ${'http://gitlab.com/foo'}
+ ${'http://gitlab.com/bar/'} | ${'foo'} | ${'http://gitlab.com/bar/foo'}
+ ${'http://gitlab.com/bar/'} | ${'/foo'} | ${'http://gitlab.com/foo'}
+ ${'http://gdk.test:3000/?foo=bar'} | ${'?qux=1'} | ${'http://gdk.test:3000/?qux=1'}
+ ${'https://gdk.test:3000/?foo=bar'} | ${'//other.test'} | ${'https://other.test/'}
+ `(
+ 'given $url, <a href="$hrefAttr"> points to $expectedHref',
+ ({ url, hrefAttr, expectedHref }) => {
+ setWindowLocation(url);
+
+ const link = document.createElement('a');
+ link.setAttribute('href', hrefAttr);
+
+ expect(link.href).toBe(expectedHref);
+ },
+ );
+ });
+ });
+
+ // This set of tests relies on Jest executing tests in source order, which is
+ // at the time of writing the only order they will execute, by design.
+ // See https://github.com/facebook/jest/issues/4386 for more details.
+ describe('window.location resetting by global beforeEach', () => {
+ const overridden = 'https://gdk.test:1234/';
+ const initial = `${TEST_HOST}/`;
+
+ it('works before an override', () => {
+ expect(window.location.href).toBe(initial);
+ });
+
+ describe('overriding', () => {
+ beforeEach(() => {
+ setWindowLocation(overridden);
+ });
+
+ it('works', () => {
+ expect(window.location.href).toBe(overridden);
+ });
+ });
+
+ it('works after an override', () => {
+ expect(window.location.href).toBe(initial);
+ });
+ });
});
diff --git a/spec/frontend/admin/analytics/devops_score/components/devops_score_callout_spec.js b/spec/frontend/admin/analytics/devops_score/components/devops_score_callout_spec.js
new file mode 100644
index 00000000000..ee14e002f1b
--- /dev/null
+++ b/spec/frontend/admin/analytics/devops_score/components/devops_score_callout_spec.js
@@ -0,0 +1,67 @@
+import { GlBanner } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import DevopsScoreCallout from '~/analytics/devops_report/components/devops_score_callout.vue';
+import { INTRO_COOKIE_KEY } from '~/analytics/devops_report/constants';
+import * as utils from '~/lib/utils/common_utils';
+import { devopsReportDocsPath, devopsScoreIntroImagePath } from '../mock_data';
+
+describe('DevopsScoreCallout', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(DevopsScoreCallout, {
+ provide: {
+ devopsReportDocsPath,
+ devopsScoreIntroImagePath,
+ },
+ });
+ };
+
+ const findBanner = () => wrapper.findComponent(GlBanner);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('with no cookie set', () => {
+ beforeEach(() => {
+ utils.setCookie = jest.fn();
+
+ createComponent();
+ });
+
+ it('displays the banner', () => {
+ expect(findBanner().exists()).toBe(true);
+ });
+
+ it('does not call setCookie', () => {
+ expect(utils.setCookie).not.toHaveBeenCalled();
+ });
+
+ describe('when the close button is clicked', () => {
+ beforeEach(() => {
+ findBanner().vm.$emit('close');
+ });
+
+ it('sets the dismissed cookie', () => {
+ expect(utils.setCookie).toHaveBeenCalledWith(INTRO_COOKIE_KEY, 'true');
+ });
+
+ it('hides the banner', () => {
+ expect(findBanner().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('with the dismissed cookie set', () => {
+ beforeEach(() => {
+ jest.spyOn(utils, 'getCookie').mockReturnValue('true');
+
+ createComponent();
+ });
+
+ it('hides the banner', () => {
+ expect(findBanner().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
index 7c20bbe21c8..8f8dac977de 100644
--- a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
+++ b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
@@ -1,14 +1,10 @@
-import { GlTable, GlBadge, GlEmptyState, GlLink } from '@gitlab/ui';
+import { GlTable, GlBadge, GlEmptyState } from '@gitlab/ui';
import { GlSingleStat } from '@gitlab/ui/dist/charts';
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import DevopsScore from '~/analytics/devops_report/components/devops_score.vue';
-import {
- devopsScoreMetricsData,
- devopsReportDocsPath,
- noDataImagePath,
- devopsScoreTableHeaders,
-} from '../mock_data';
+import DevopsScoreCallout from '~/analytics/devops_report/components/devops_score_callout.vue';
+import { devopsScoreMetricsData, noDataImagePath, devopsScoreTableHeaders } from '../mock_data';
describe('DevopsScore', () => {
let wrapper;
@@ -18,7 +14,6 @@ describe('DevopsScore', () => {
mount(DevopsScore, {
provide: {
devopsScoreMetrics,
- devopsReportDocsPath,
noDataImagePath,
},
}),
@@ -30,12 +25,19 @@ describe('DevopsScore', () => {
const findCol = (testId) => findTable().find(`[data-testid="${testId}"]`);
const findUsageCol = () => findCol('usageCol');
const findDevopsScoreApp = () => wrapper.findByTestId('devops-score-app');
+ const bannerExists = () => wrapper.findComponent(DevopsScoreCallout).exists();
+ const findDocsLink = () =>
+ wrapper.findByRole('link', { name: 'See example DevOps Score page in our documentation.' });
describe('with no data', () => {
beforeEach(() => {
createComponent({ devopsScoreMetrics: {} });
});
+ it('includes the DevopsScoreCallout component ', () => {
+ expect(bannerExists()).toBe(true);
+ });
+
describe('empty state', () => {
it('displays the empty state', () => {
expect(findEmptyState().exists()).toBe(true);
@@ -48,7 +50,10 @@ describe('DevopsScore', () => {
});
it('contains a link to the feature documentation', () => {
- expect(wrapper.findComponent(GlLink).exists()).toBe(true);
+ expect(findDocsLink().exists()).toBe(true);
+ expect(findDocsLink().attributes('href')).toBe(
+ '/help/user/admin_area/analytics/dev_ops_report',
+ );
});
});
@@ -62,6 +67,10 @@ describe('DevopsScore', () => {
createComponent();
});
+ it('includes the DevopsScoreCallout component ', () => {
+ expect(bannerExists()).toBe(true);
+ });
+
it('does not display the empty state', () => {
expect(findEmptyState().exists()).toBe(false);
});
diff --git a/spec/frontend/admin/analytics/devops_score/mock_data.js b/spec/frontend/admin/analytics/devops_score/mock_data.js
index ae0c01a2661..e8f8b778ffa 100644
--- a/spec/frontend/admin/analytics/devops_score/mock_data.js
+++ b/spec/frontend/admin/analytics/devops_score/mock_data.js
@@ -44,3 +44,5 @@ export const devopsScoreMetricsData = {
export const devopsReportDocsPath = 'docs-path';
export const noDataImagePath = 'image-path';
+
+export const devopsScoreIntroImagePath = 'image-path';
diff --git a/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js b/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
index 18339164d5a..4bb22feb913 100644
--- a/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
+++ b/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
@@ -192,22 +192,27 @@ describe('Signup Form', () => {
describe('form submit button confirmation modal for side-effect of adding possibly unwanted new users', () => {
it.each`
- requireAdminApprovalAction | userCapAction | buttonEffect
- ${'unchanged from true'} | ${'unchanged'} | ${'submits form'}
- ${'unchanged from false'} | ${'unchanged'} | ${'submits form'}
- ${'toggled off'} | ${'unchanged'} | ${'shows confirmation modal'}
- ${'toggled on'} | ${'unchanged'} | ${'submits form'}
- ${'unchanged from false'} | ${'increased'} | ${'shows confirmation modal'}
- ${'unchanged from true'} | ${'increased'} | ${'shows confirmation modal'}
- ${'toggled off'} | ${'increased'} | ${'shows confirmation modal'}
- ${'toggled on'} | ${'increased'} | ${'shows confirmation modal'}
- ${'toggled on'} | ${'decreased'} | ${'submits form'}
- ${'unchanged from false'} | ${'changed from limited to unlimited'} | ${'shows confirmation modal'}
- ${'unchanged from false'} | ${'changed from unlimited to limited'} | ${'submits form'}
- ${'unchanged from false'} | ${'unchanged from unlimited'} | ${'submits form'}
+ requireAdminApprovalAction | userCapAction | pendingUserCount | buttonEffect
+ ${'unchanged from true'} | ${'unchanged'} | ${0} | ${'submits form'}
+ ${'unchanged from false'} | ${'unchanged'} | ${0} | ${'submits form'}
+ ${'toggled off'} | ${'unchanged'} | ${1} | ${'shows confirmation modal'}
+ ${'toggled off'} | ${'unchanged'} | ${0} | ${'submits form'}
+ ${'toggled on'} | ${'unchanged'} | ${0} | ${'submits form'}
+ ${'unchanged from false'} | ${'increased'} | ${1} | ${'shows confirmation modal'}
+ ${'unchanged from true'} | ${'increased'} | ${0} | ${'submits form'}
+ ${'toggled off'} | ${'increased'} | ${1} | ${'shows confirmation modal'}
+ ${'toggled off'} | ${'increased'} | ${0} | ${'submits form'}
+ ${'toggled on'} | ${'increased'} | ${1} | ${'shows confirmation modal'}
+ ${'toggled on'} | ${'increased'} | ${0} | ${'submits form'}
+ ${'toggled on'} | ${'decreased'} | ${0} | ${'submits form'}
+ ${'toggled on'} | ${'decreased'} | ${1} | ${'submits form'}
+ ${'unchanged from false'} | ${'changed from limited to unlimited'} | ${1} | ${'shows confirmation modal'}
+ ${'unchanged from false'} | ${'changed from limited to unlimited'} | ${0} | ${'submits form'}
+ ${'unchanged from false'} | ${'changed from unlimited to limited'} | ${0} | ${'submits form'}
+ ${'unchanged from false'} | ${'unchanged from unlimited'} | ${0} | ${'submits form'}
`(
- '$buttonEffect if require admin approval for new sign-ups is $requireAdminApprovalAction and the user cap is $userCapAction',
- async ({ requireAdminApprovalAction, userCapAction, buttonEffect }) => {
+ '$buttonEffect if require admin approval for new sign-ups is $requireAdminApprovalAction and the user cap is $userCapAction and pending user count is $pendingUserCount',
+ async ({ requireAdminApprovalAction, userCapAction, pendingUserCount, buttonEffect }) => {
let isModalDisplayed;
switch (buttonEffect) {
@@ -224,7 +229,9 @@ describe('Signup Form', () => {
const isFormSubmittedWhenClickingFormSubmitButton = !isModalDisplayed;
- const injectedProps = {};
+ const injectedProps = {
+ pendingUserCount,
+ };
const USER_CAP_DEFAULT = 5;
@@ -310,6 +317,7 @@ describe('Signup Form', () => {
await mountComponent({
injectedProps: {
newUserSignupsCap: INITIAL_USER_CAP,
+ pendingUserCount: 5,
},
stubs: { GlButton, GlModal: stubComponent(GlModal) },
});
diff --git a/spec/frontend/admin/signup_restrictions/mock_data.js b/spec/frontend/admin/signup_restrictions/mock_data.js
index 624a5614c9c..135fc8caae0 100644
--- a/spec/frontend/admin/signup_restrictions/mock_data.js
+++ b/spec/frontend/admin/signup_restrictions/mock_data.js
@@ -17,6 +17,7 @@ export const rawMockData = {
supportedSyntaxLinkUrl: '/supported/syntax/link',
emailRestrictions: 'user1@domain.com, user2@domain.com',
afterSignUpText: 'Congratulations on your successful sign-up!',
+ pendingUserCount: '0',
};
export const mockData = {
@@ -38,4 +39,5 @@ export const mockData = {
supportedSyntaxLinkUrl: '/supported/syntax/link',
emailRestrictions: 'user1@domain.com, user2@domain.com',
afterSignUpText: 'Congratulations on your successful sign-up!',
+ pendingUserCount: '0',
};
diff --git a/spec/frontend/admin/users/components/actions/actions_spec.js b/spec/frontend/admin/users/components/actions/actions_spec.js
index 67d9bac8580..fd05b08a3fb 100644
--- a/spec/frontend/admin/users/components/actions/actions_spec.js
+++ b/spec/frontend/admin/users/components/actions/actions_spec.js
@@ -5,8 +5,8 @@ import { nextTick } from 'vue';
import Actions from '~/admin/users/components/actions';
import SharedDeleteAction from '~/admin/users/components/actions/shared/shared_delete_action.vue';
import { capitalizeFirstCharacter } from '~/lib/utils/text_utility';
-
import { CONFIRMATION_ACTIONS, DELETE_ACTIONS } from '../../constants';
+import { paths } from '../../mock_data';
describe('Action components', () => {
let wrapper;
@@ -47,32 +47,33 @@ describe('Action components', () => {
describe('DELETE_ACTION_COMPONENTS', () => {
const oncallSchedules = [{ name: 'schedule1' }, { name: 'schedule2' }];
- it.each(DELETE_ACTIONS)('renders a dropdown item for "%s"', async (action) => {
- initComponent({
- component: Actions[capitalizeFirstCharacter(action)],
- props: {
- username: 'John Doe',
- paths: {
- delete: '/delete',
- block: '/block',
+
+ it.each(DELETE_ACTIONS.map((action) => [action, paths[action]]))(
+ 'renders a dropdown item for "%s"',
+ async (action, expectedPath) => {
+ initComponent({
+ component: Actions[capitalizeFirstCharacter(action)],
+ props: {
+ username: 'John Doe',
+ paths,
+ oncallSchedules,
},
- oncallSchedules,
- },
- stubs: { SharedDeleteAction },
- });
+ stubs: { SharedDeleteAction },
+ });
- await nextTick();
+ await nextTick();
- const sharedAction = wrapper.find(SharedDeleteAction);
+ const sharedAction = wrapper.find(SharedDeleteAction);
- expect(sharedAction.attributes('data-block-user-url')).toBe('/block');
- expect(sharedAction.attributes('data-delete-user-url')).toBe('/delete');
- expect(sharedAction.attributes('data-gl-modal-action')).toBe(kebabCase(action));
- expect(sharedAction.attributes('data-username')).toBe('John Doe');
- expect(sharedAction.attributes('data-oncall-schedules')).toBe(
- JSON.stringify(oncallSchedules),
- );
- expect(findDropdownItem().exists()).toBe(true);
- });
+ expect(sharedAction.attributes('data-block-user-url')).toBe(paths.block);
+ expect(sharedAction.attributes('data-delete-user-url')).toBe(expectedPath);
+ expect(sharedAction.attributes('data-gl-modal-action')).toBe(kebabCase(action));
+ expect(sharedAction.attributes('data-username')).toBe('John Doe');
+ expect(sharedAction.attributes('data-oncall-schedules')).toBe(
+ JSON.stringify(oncallSchedules),
+ );
+ expect(findDropdownItem().exists()).toBe(true);
+ },
+ );
});
});
diff --git a/spec/frontend/admin/users/components/user_date_spec.js b/spec/frontend/admin/users/components/user_date_spec.js
index 1a2f2938db5..af262c6d3f0 100644
--- a/spec/frontend/admin/users/components/user_date_spec.js
+++ b/spec/frontend/admin/users/components/user_date_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import UserDate from '~/vue_shared/components/user_date.vue';
+import { ISO_SHORT_FORMAT } from '~/vue_shared/constants';
import { users } from '../mock_data';
const mockDate = users[0].createdAt;
@@ -22,12 +23,15 @@ describe('FormatDate component', () => {
});
it.each`
- date | output
- ${mockDate} | ${'13 Nov, 2020'}
- ${null} | ${'Never'}
- ${undefined} | ${'Never'}
- `('renders $date as $output', ({ date, output }) => {
- initComponent({ date });
+ date | dateFormat | output
+ ${mockDate} | ${undefined} | ${'13 Nov, 2020'}
+ ${null} | ${undefined} | ${'Never'}
+ ${undefined} | ${undefined} | ${'Never'}
+ ${mockDate} | ${ISO_SHORT_FORMAT} | ${'2020-11-13'}
+ ${null} | ${ISO_SHORT_FORMAT} | ${'Never'}
+ ${undefined} | ${ISO_SHORT_FORMAT} | ${'Never'}
+ `('renders $date as $output', ({ date, dateFormat, output }) => {
+ initComponent({ date, dateFormat });
expect(wrapper.text()).toBe(output);
});
diff --git a/spec/frontend/admin/users/mock_data.js b/spec/frontend/admin/users/mock_data.js
index ded3e6f7edf..73fa73c0b47 100644
--- a/spec/frontend/admin/users/mock_data.js
+++ b/spec/frontend/admin/users/mock_data.js
@@ -30,7 +30,7 @@ export const paths = {
activate: '/admin/users/id/activate',
unlock: '/admin/users/id/unlock',
delete: '/admin/users/id',
- deleteWithContributions: '/admin/users/id',
+ deleteWithContributions: '/admin/users/id?hard_delete=true',
adminUser: '/admin/users/id',
ban: '/admin/users/id/ban',
unban: '/admin/users/id/unban',
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
index 1c4dde39585..e6a6e01c41c 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
@@ -6,7 +6,6 @@ import VueApollo from 'vue-apollo';
import createHttpIntegrationMutation from 'ee_else_ce/alerts_settings/graphql/mutations/create_http_integration.mutation.graphql';
import updateHttpIntegrationMutation from 'ee_else_ce/alerts_settings/graphql/mutations/update_http_integration.mutation.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
-import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import IntegrationsList from '~/alerts_settings/components/alerts_integrations_list.vue';
@@ -57,7 +56,6 @@ describe('AlertsSettingsWrapper', () => {
let wrapper;
let fakeApollo;
let destroyIntegrationHandler;
- useMockIntersectionObserver();
const httpMappingData = {
payloadExample: '{"test: : "field"}',
diff --git a/spec/frontend/analytics/devops_report/components/service_ping_disabled_spec.js b/spec/frontend/analytics/devops_report/components/service_ping_disabled_spec.js
index 75ef9d9db94..c5c40e9a360 100644
--- a/spec/frontend/analytics/devops_report/components/service_ping_disabled_spec.js
+++ b/spec/frontend/analytics/devops_report/components/service_ping_disabled_spec.js
@@ -1,6 +1,6 @@
import { GlEmptyState, GlSprintf } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import ServicePingDisabled from '~/analytics/devops_report/components/service_ping_disabled.vue';
describe('~/analytics/devops_report/components/service_ping_disabled.vue', () => {
@@ -11,21 +11,19 @@ describe('~/analytics/devops_report/components/service_ping_disabled.vue', () =>
});
const createWrapper = ({ isAdmin = false } = {}) => {
- wrapper = shallowMountExtended(ServicePingDisabled, {
+ wrapper = mountExtended(ServicePingDisabled, {
provide: {
isAdmin,
svgPath: TEST_HOST,
- docsLink: TEST_HOST,
primaryButtonPath: TEST_HOST,
},
- stubs: { GlEmptyState, GlSprintf },
});
};
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findMessageForRegularUsers = () => wrapper.findComponent(GlSprintf);
- const findDocsLink = () => wrapper.findByTestId('docs-link');
- const findPowerOnButton = () => wrapper.findByTestId('power-on-button');
+ const findDocsLink = () => wrapper.findByRole('link', { name: 'service ping' });
+ const findPowerOnButton = () => wrapper.findByRole('link', { name: 'Turn on service ping' });
it('renders empty state with provided SVG path', () => {
createWrapper();
@@ -45,7 +43,7 @@ describe('~/analytics/devops_report/components/service_ping_disabled.vue', () =>
it('renders docs link', () => {
expect(findDocsLink().exists()).toBe(true);
- expect(findDocsLink().attributes('href')).toBe(TEST_HOST);
+ expect(findDocsLink().attributes('href')).toBe('/help/development/service_ping/index.md');
});
});
diff --git a/spec/frontend/authentication/two_factor_auth/index_spec.js b/spec/frontend/authentication/two_factor_auth/index_spec.js
index f5345139021..0ff9d60f409 100644
--- a/spec/frontend/authentication/two_factor_auth/index_spec.js
+++ b/spec/frontend/authentication/two_factor_auth/index_spec.js
@@ -1,5 +1,6 @@
import { getByTestId, fireEvent } from '@testing-library/dom';
import { createWrapper } from '@vue/test-utils';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { initRecoveryCodes, initClose2faSuccessMessage } from '~/authentication/two_factor_auth';
import RecoveryCodes from '~/authentication/two_factor_auth/components/recovery_codes.vue';
import * as urlUtils from '~/lib/utils/url_utility';
@@ -53,8 +54,7 @@ describe('initClose2faSuccessMessage', () => {
describe('when alert is closed', () => {
beforeEach(() => {
- delete window.location;
- window.location = new URL(
+ setWindowLocation(
'https://localhost/-/profile/account?two_factor_auth_enabled_successfully=true',
);
diff --git a/spec/frontend/authentication/webauthn/error_spec.js b/spec/frontend/authentication/webauthn/error_spec.js
index 26f1ca5e27d..9b71f77dde2 100644
--- a/spec/frontend/authentication/webauthn/error_spec.js
+++ b/spec/frontend/authentication/webauthn/error_spec.js
@@ -1,3 +1,4 @@
+import setWindowLocation from 'helpers/set_window_location_helper';
import WebAuthnError from '~/authentication/webauthn/error';
describe('WebAuthnError', () => {
@@ -17,19 +18,8 @@ describe('WebAuthnError', () => {
});
describe('SecurityError', () => {
- const { location } = window;
-
- beforeEach(() => {
- delete window.location;
- window.location = {};
- });
-
- afterEach(() => {
- window.location = location;
- });
-
it('returns a descriptive error if https is disabled', () => {
- window.location.protocol = 'http:';
+ setWindowLocation('http://localhost');
const expectedMessage =
'WebAuthn only works with HTTPS-enabled websites. Contact your administrator for more details.';
@@ -39,7 +29,7 @@ describe('WebAuthnError', () => {
});
it('returns a generic error if https is enabled', () => {
- window.location.protocol = 'https:';
+ setWindowLocation('https://localhost');
const expectedMessage = 'There was a problem communicating with your device.';
expect(
diff --git a/spec/frontend/authentication/webauthn/register_spec.js b/spec/frontend/authentication/webauthn/register_spec.js
index 43cd3d7ca34..0f8ea2b635f 100644
--- a/spec/frontend/authentication/webauthn/register_spec.js
+++ b/spec/frontend/authentication/webauthn/register_spec.js
@@ -1,4 +1,5 @@
import $ from 'jquery';
+import setWindowLocation from 'helpers/set_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
import WebAuthnRegister from '~/authentication/webauthn/register';
import MockWebAuthnDevice from './mock_webauthn_device';
@@ -50,17 +51,14 @@ describe('WebAuthnRegister', () => {
});
describe('when unsupported', () => {
- const { location, PublicKeyCredential } = window;
+ const { PublicKeyCredential } = window;
beforeEach(() => {
- delete window.location;
delete window.credentials;
- window.location = {};
window.PublicKeyCredential = undefined;
});
afterEach(() => {
- window.location = location;
window.PublicKeyCredential = PublicKeyCredential;
});
@@ -69,7 +67,7 @@ describe('WebAuthnRegister', () => {
${false} | ${'WebAuthn only works with HTTPS-enabled websites'}
${true} | ${'Please use a supported browser, e.g. Chrome (67+) or Firefox'}
`('when https is $httpsEnabled', ({ httpsEnabled, expectedText }) => {
- window.location.protocol = httpsEnabled ? 'https:' : 'http:';
+ setWindowLocation(`${httpsEnabled ? 'https:' : 'http:'}//localhost`);
component.start();
expect(findMessage().text()).toContain(expectedText);
diff --git a/spec/frontend/blob/components/blob_header_default_actions_spec.js b/spec/frontend/blob/components/blob_header_default_actions_spec.js
index bce65899c43..e321bb41774 100644
--- a/spec/frontend/blob/components/blob_header_default_actions_spec.js
+++ b/spec/frontend/blob/components/blob_header_default_actions_spec.js
@@ -39,6 +39,9 @@ describe('Blob Header Default Actions', () => {
});
describe('renders', () => {
+ const findCopyButton = () => wrapper.find('[data-testid="copyContentsButton"]');
+ const findViewRawButton = () => wrapper.find('[data-testid="viewRawButton"]');
+
it('gl-button-group component', () => {
expect(btnGroup.exists()).toBe(true);
});
@@ -76,7 +79,14 @@ describe('Blob Header Default Actions', () => {
hasRenderError: true,
});
- expect(wrapper.find('[data-testid="copyContentsButton"]').exists()).toBe(false);
+ expect(findCopyButton().exists()).toBe(false);
+ });
+
+ it('does not render the copy and view raw button if isBinary is set to true', () => {
+ createComponent({ isBinary: true });
+
+ expect(findCopyButton().exists()).toBe(false);
+ expect(findViewRawButton().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/blob/components/blob_header_spec.js b/spec/frontend/blob/components/blob_header_spec.js
index 865e8ab1124..f841785be42 100644
--- a/spec/frontend/blob/components/blob_header_spec.js
+++ b/spec/frontend/blob/components/blob_header_spec.js
@@ -29,6 +29,8 @@ describe('Blob Header Default Actions', () => {
});
describe('rendering', () => {
+ const findDefaultActions = () => wrapper.find(DefaultActions);
+
const slots = {
prepend: 'Foo Prepend',
actions: 'Actions Bar',
@@ -42,7 +44,7 @@ describe('Blob Header Default Actions', () => {
it('renders all components', () => {
createComponent();
expect(wrapper.find(ViewerSwitcher).exists()).toBe(true);
- expect(wrapper.find(DefaultActions).exists()).toBe(true);
+ expect(findDefaultActions().exists()).toBe(true);
expect(wrapper.find(BlobFilepath).exists()).toBe(true);
});
@@ -100,7 +102,13 @@ describe('Blob Header Default Actions', () => {
hasRenderError: true,
},
);
- expect(wrapper.find(DefaultActions).props('hasRenderError')).toBe(true);
+ expect(findDefaultActions().props('hasRenderError')).toBe(true);
+ });
+
+ it('passes the correct isBinary value to default actions when viewing a binary file', () => {
+ createComponent({}, {}, { isBinary: true });
+
+ expect(findDefaultActions().props('isBinary')).toBe(true);
});
});
diff --git a/spec/frontend/blob/csv/csv_viewer_spec.js b/spec/frontend/blob/csv/csv_viewer_spec.js
index abb914b8f57..17973c709c1 100644
--- a/spec/frontend/blob/csv/csv_viewer_spec.js
+++ b/spec/frontend/blob/csv/csv_viewer_spec.js
@@ -1,8 +1,9 @@
-import { GlAlert, GlLoadingIcon, GlTable } from '@gitlab/ui';
+import { GlLoadingIcon, GlTable } from '@gitlab/ui';
import { getAllByRole } from '@testing-library/dom';
import { shallowMount, mount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import CSVViewer from '~/blob/csv/csv_viewer.vue';
+import CsvViewer from '~/blob/csv/csv_viewer.vue';
+import PapaParseAlert from '~/vue_shared/components/papa_parse_alert.vue';
const validCsv = 'one,two,three';
const brokenCsv = '{\n "json": 1,\n "key": [1, 2, 3]\n}';
@@ -11,7 +12,7 @@ describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
let wrapper;
const createComponent = ({ csv = validCsv, mountFunction = shallowMount } = {}) => {
- wrapper = mountFunction(CSVViewer, {
+ wrapper = mountFunction(CsvViewer, {
propsData: {
csv,
},
@@ -20,7 +21,7 @@ describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
const findCsvTable = () => wrapper.findComponent(GlTable);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findAlert = () => wrapper.findComponent(GlAlert);
+ const findAlert = () => wrapper.findComponent(PapaParseAlert);
afterEach(() => {
wrapper.destroy();
@@ -35,12 +36,12 @@ describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
});
describe('when the CSV contains errors', () => {
- it('should render alert', async () => {
+ it('should render alert with correct props', async () => {
createComponent({ csv: brokenCsv });
await nextTick;
expect(findAlert().props()).toMatchObject({
- variant: 'danger',
+ papaParseErrors: [{ code: 'UndetectableDelimiter' }],
});
});
});
diff --git a/spec/frontend/blob/viewer/index_spec.js b/spec/frontend/blob/viewer/index_spec.js
index 6a24b76abc8..705c4630a68 100644
--- a/spec/frontend/blob/viewer/index_spec.js
+++ b/spec/frontend/blob/viewer/index_spec.js
@@ -3,7 +3,7 @@
import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import { setTestTimeout } from 'helpers/timeout';
-import BlobViewer from '~/blob/viewer/index';
+import { BlobViewer } from '~/blob/viewer/index';
import axios from '~/lib/utils/axios_utils';
const execImmediately = (callback) => {
diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js
index 87f9a68f5dd..7d3ecc773a6 100644
--- a/spec/frontend/boards/board_card_inner_spec.js
+++ b/spec/frontend/boards/board_card_inner_spec.js
@@ -1,6 +1,7 @@
import { GlLabel, GlLoadingIcon, GlTooltip } from '@gitlab/ui';
import { range } from 'lodash';
import Vuex from 'vuex';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import BoardBlockedIcon from '~/boards/components/board_blocked_icon.vue';
import BoardCardInner from '~/boards/components/board_card_inner.vue';
@@ -8,7 +9,7 @@ import { issuableTypes } from '~/boards/constants';
import eventHub from '~/boards/eventhub';
import defaultStore from '~/boards/stores';
import { updateHistory } from '~/lib/utils/url_utility';
-import { mockLabelList, mockIssue } from './mock_data';
+import { mockLabelList, mockIssue, mockIssueFullPath } from './mock_data';
jest.mock('~/lib/utils/url_utility');
jest.mock('~/boards/eventhub');
@@ -44,7 +45,7 @@ describe('Board card component', () => {
const findEpicCountablesTotalWeight = () => wrapper.findByTestId('epic-countables-total-weight');
const findEpicProgressTooltip = () => wrapper.findByTestId('epic-progress-tooltip-content');
- const createStore = ({ isEpicBoard = false } = {}) => {
+ const createStore = ({ isEpicBoard = false, isProjectBoard = false } = {}) => {
store = new Vuex.Store({
...defaultStore,
state: {
@@ -54,7 +55,7 @@ describe('Board card component', () => {
getters: {
isGroupBoard: () => true,
isEpicBoard: () => isEpicBoard,
- isProjectBoard: () => false,
+ isProjectBoard: () => isProjectBoard,
},
});
};
@@ -133,6 +134,17 @@ describe('Board card component', () => {
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
});
+ it('does not render item reference path', () => {
+ createStore({ isProjectBoard: true });
+ createWrapper();
+
+ expect(wrapper.find('.board-card-number').text()).not.toContain(mockIssueFullPath);
+ });
+
+ it('renders item reference path', () => {
+ expect(wrapper.find('.board-card-number').text()).toContain(mockIssueFullPath);
+ });
+
describe('blocked', () => {
it('renders blocked icon if issue is blocked', async () => {
createWrapper({
@@ -363,8 +375,6 @@ describe('Board card component', () => {
describe('filterByLabel method', () => {
beforeEach(() => {
- delete window.location;
-
wrapper.setProps({
updateFilters: true,
});
@@ -373,7 +383,7 @@ describe('Board card component', () => {
describe('when selected label is not in the filter', () => {
beforeEach(() => {
jest.spyOn(wrapper.vm, 'performSearch').mockImplementation(() => {});
- window.location = { search: '' };
+ setWindowLocation('?');
wrapper.vm.filterByLabel(label1);
});
@@ -394,7 +404,7 @@ describe('Board card component', () => {
describe('when selected label is already in the filter', () => {
beforeEach(() => {
jest.spyOn(wrapper.vm, 'performSearch').mockImplementation(() => {});
- window.location = { search: '?label_name[]=testing%20123' };
+ setWindowLocation('?label_name[]=testing%20123');
wrapper.vm.filterByLabel(label1);
});
diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js
index c440c110094..811f0043a01 100644
--- a/spec/frontend/boards/board_list_helper.js
+++ b/spec/frontend/boards/board_list_helper.js
@@ -4,8 +4,9 @@ import Vuex from 'vuex';
import BoardCard from '~/boards/components/board_card.vue';
import BoardList from '~/boards/components/board_list.vue';
import BoardNewIssue from '~/boards/components/board_new_issue.vue';
+import BoardNewItem from '~/boards/components/board_new_item.vue';
import defaultState from '~/boards/stores/state';
-import { mockList, mockIssuesByListId, issues } from './mock_data';
+import { mockList, mockIssuesByListId, issues, mockGroupProjects } from './mock_data';
export default function createComponent({
listIssueProps = {},
@@ -17,6 +18,7 @@ export default function createComponent({
state = defaultState,
stubs = {
BoardNewIssue,
+ BoardNewItem,
BoardCard,
},
} = {}) {
@@ -25,6 +27,7 @@ export default function createComponent({
const store = new Vuex.Store({
state: {
+ selectedProject: mockGroupProjects[0],
boardItemsByListId: mockIssuesByListId,
boardItems: issues,
pageInfoByListId: {
@@ -77,6 +80,7 @@ export default function createComponent({
provide: {
groupId: null,
rootPath: '/',
+ boardId: '1',
weightFeatureAvailable: false,
boardWeight: null,
canAdminList: true,
diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js
index a3b1810ab80..6f623eab1af 100644
--- a/spec/frontend/boards/board_list_spec.js
+++ b/spec/frontend/boards/board_list_spec.js
@@ -1,3 +1,5 @@
+import Draggable from 'vuedraggable';
+import { DraggableItemTypes } from 'ee_else_ce/boards/constants';
import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
import createComponent from 'jest/boards/board_list_helper';
import BoardCard from '~/boards/components/board_card.vue';
@@ -10,6 +12,23 @@ describe('Board list component', () => {
const findByTestId = (testId) => wrapper.find(`[data-testid="${testId}"]`);
const findIssueCountLoadingIcon = () => wrapper.find('[data-testid="count-loading-icon"]');
+ const findDraggable = () => wrapper.findComponent(Draggable);
+
+ const startDrag = (
+ params = {
+ item: {
+ dataset: {
+ draggableItemType: DraggableItemTypes.card,
+ },
+ },
+ },
+ ) => {
+ findByTestId('tree-root-wrapper').vm.$emit('start', params);
+ };
+
+ const endDrag = (params) => {
+ findByTestId('tree-root-wrapper').vm.$emit('end', params);
+ };
useFakeRequestAnimationFrame();
@@ -155,40 +174,89 @@ describe('Board list component', () => {
});
describe('drag & drop issue', () => {
- beforeEach(() => {
- wrapper = createComponent();
- });
+ describe('when dragging is allowed', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ componentProps: {
+ disabled: false,
+ },
+ });
+ });
- describe('handleDragOnStart', () => {
- it('adds a class `is-dragging` to document body', () => {
- expect(document.body.classList.contains('is-dragging')).toBe(false);
+ it('Draggable is used', () => {
+ expect(findDraggable().exists()).toBe(true);
+ });
+
+ describe('handleDragOnStart', () => {
+ it('adds a class `is-dragging` to document body', () => {
+ expect(document.body.classList.contains('is-dragging')).toBe(false);
- findByTestId('tree-root-wrapper').vm.$emit('start');
+ startDrag();
- expect(document.body.classList.contains('is-dragging')).toBe(true);
+ expect(document.body.classList.contains('is-dragging')).toBe(true);
+ });
});
- });
- describe('handleDragOnEnd', () => {
- it('removes class `is-dragging` from document body', () => {
- jest.spyOn(wrapper.vm, 'moveItem').mockImplementation(() => {});
- document.body.classList.add('is-dragging');
+ describe('handleDragOnEnd', () => {
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm, 'moveItem').mockImplementation(() => {});
+
+ startDrag();
+ });
+
+ it('removes class `is-dragging` from document body', () => {
+ document.body.classList.add('is-dragging');
+
+ endDrag({
+ oldIndex: 1,
+ newIndex: 0,
+ item: {
+ dataset: {
+ draggableItemType: DraggableItemTypes.card,
+ itemId: mockIssues[0].id,
+ itemIid: mockIssues[0].iid,
+ itemPath: mockIssues[0].referencePath,
+ },
+ },
+ to: { children: [], dataset: { listId: 'gid://gitlab/List/1' } },
+ from: { dataset: { listId: 'gid://gitlab/List/2' } },
+ });
- findByTestId('tree-root-wrapper').vm.$emit('end', {
- oldIndex: 1,
- newIndex: 0,
- item: {
- dataset: {
- itemId: mockIssues[0].id,
- itemIid: mockIssues[0].iid,
- itemPath: mockIssues[0].referencePath,
+ expect(document.body.classList.contains('is-dragging')).toBe(false);
+ });
+
+ it(`should not handle the event if the dragged item is not a "${DraggableItemTypes.card}"`, () => {
+ endDrag({
+ oldIndex: 1,
+ newIndex: 0,
+ item: {
+ dataset: {
+ draggableItemType: DraggableItemTypes.list,
+ itemId: mockIssues[0].id,
+ itemIid: mockIssues[0].iid,
+ itemPath: mockIssues[0].referencePath,
+ },
},
+ to: { children: [], dataset: { listId: 'gid://gitlab/List/1' } },
+ from: { dataset: { listId: 'gid://gitlab/List/2' } },
+ });
+
+ expect(document.body.classList.contains('is-dragging')).toBe(true);
+ });
+ });
+ });
+
+ describe('when dragging is not allowed', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ componentProps: {
+ disabled: true,
},
- to: { children: [], dataset: { listId: 'gid://gitlab/List/1' } },
- from: { dataset: { listId: 'gid://gitlab/List/2' } },
});
+ });
- expect(document.body.classList.contains('is-dragging')).toBe(false);
+ it('Draggable is not used', () => {
+ expect(findDraggable().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index 9a9ce7b8dc1..25ec568e48d 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -31,6 +31,7 @@ describe('Board card', () => {
actions: mockActions,
getters: {
isEpicBoard: () => false,
+ isProjectBoard: () => false,
},
});
};
diff --git a/spec/frontend/boards/components/board_filtered_search_spec.js b/spec/frontend/boards/components/board_filtered_search_spec.js
index 6ac5d16e5a3..50f86e92adb 100644
--- a/spec/frontend/boards/components/board_filtered_search_spec.js
+++ b/spec/frontend/boards/components/board_filtered_search_spec.js
@@ -115,6 +115,9 @@ describe('BoardFilteredSearch', () => {
{ type: 'author_username', value: { data: 'root', operator: '=' } },
{ type: 'label_name', value: { data: 'label', operator: '=' } },
{ type: 'label_name', value: { data: 'label2', operator: '=' } },
+ { type: 'milestone_title', value: { data: 'New Milestone', operator: '=' } },
+ { type: 'types', value: { data: 'INCIDENT', operator: '=' } },
+ { type: 'weight', value: { data: '2', operator: '=' } },
];
jest.spyOn(urlUtility, 'updateHistory');
findFilteredSearch().vm.$emit('onFilter', mockFilters);
@@ -122,7 +125,8 @@ describe('BoardFilteredSearch', () => {
expect(urlUtility.updateHistory).toHaveBeenCalledWith({
title: '',
replace: true,
- url: 'http://test.host/?author_username=root&label_name[]=label&label_name[]=label2',
+ url:
+ 'http://test.host/?author_username=root&label_name[]=label&label_name[]=label2&milestone_title=New+Milestone&types=INCIDENT&weight=2',
});
});
});
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index 3966c3e6b87..52f1907654a 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -1,5 +1,6 @@
import { GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
@@ -75,10 +76,6 @@ describe('BoardForm', () => {
});
};
- beforeEach(() => {
- delete window.location;
- });
-
afterEach(() => {
wrapper.destroy();
wrapper = null;
@@ -244,7 +241,7 @@ describe('BoardForm', () => {
updateBoard: { board: { id: 'gid://gitlab/Board/321', webPath: 'test-path' } },
},
});
- window.location = new URL('https://test/boards/1');
+ setWindowLocation('https://test/boards/1');
createComponent({ canAdminBoard: true, currentPage: formType.edit });
findInput().trigger('keyup.enter', { metaKey: true });
@@ -270,7 +267,7 @@ describe('BoardForm', () => {
updateBoard: { board: { id: 'gid://gitlab/Board/321', webPath: 'test-path' } },
},
});
- window.location = new URL('https://test/boards/1?group_by=epic');
+ setWindowLocation('https://test/boards/1?group_by=epic');
createComponent({ canAdminBoard: true, currentPage: formType.edit });
findInput().trigger('keyup.enter', { metaKey: true });
diff --git a/spec/frontend/boards/components/board_new_issue_spec.js b/spec/frontend/boards/components/board_new_issue_spec.js
index e6405bbcff3..57ccebf3676 100644
--- a/spec/frontend/boards/components/board_new_issue_spec.js
+++ b/spec/frontend/boards/components/board_new_issue_spec.js
@@ -1,6 +1,9 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import BoardNewIssue from '~/boards/components/board_new_issue.vue';
+import BoardNewItem from '~/boards/components/board_new_item.vue';
+import ProjectSelect from '~/boards/components/project_select.vue';
+import eventHub from '~/boards/eventhub';
import { mockList, mockGroupProjects } from '../mock_data';
@@ -8,107 +11,104 @@ const localVue = createLocalVue();
localVue.use(Vuex);
+const addListNewIssuesSpy = jest.fn().mockResolvedValue();
+const mockActions = { addListNewIssue: addListNewIssuesSpy };
+
+const createComponent = ({
+ state = { selectedProject: mockGroupProjects[0], fullPath: mockGroupProjects[0].fullPath },
+ actions = mockActions,
+ getters = { isGroupBoard: () => true, isProjectBoard: () => false },
+} = {}) =>
+ shallowMount(BoardNewIssue, {
+ localVue,
+ store: new Vuex.Store({
+ state,
+ actions,
+ getters,
+ }),
+ propsData: {
+ list: mockList,
+ },
+ provide: {
+ groupId: 1,
+ weightFeatureAvailable: false,
+ boardWeight: null,
+ },
+ stubs: {
+ BoardNewItem,
+ },
+ });
+
describe('Issue boards new issue form', () => {
let wrapper;
- let vm;
-
- const addListNewIssuesSpy = jest.fn();
-
- const findSubmitButton = () => wrapper.find({ ref: 'submitButton' });
- const findCancelButton = () => wrapper.find({ ref: 'cancelButton' });
- const findSubmitForm = () => wrapper.find({ ref: 'submitForm' });
-
- const submitIssue = () => {
- const dummySubmitEvent = {
- preventDefault() {},
- };
- return findSubmitForm().trigger('submit', dummySubmitEvent);
- };
-
- beforeEach(() => {
- const store = new Vuex.Store({
- state: { selectedProject: mockGroupProjects[0] },
- actions: { addListNewIssue: addListNewIssuesSpy },
- getters: { isGroupBoard: () => false, isProjectBoard: () => true },
- });
-
- wrapper = shallowMount(BoardNewIssue, {
- propsData: {
- disabled: false,
- list: mockList,
- },
- store,
- localVue,
- provide: {
- groupId: null,
- weightFeatureAvailable: false,
- boardWeight: null,
- },
- });
+ const findBoardNewItem = () => wrapper.findComponent(BoardNewItem);
- vm = wrapper.vm;
+ beforeEach(async () => {
+ wrapper = createComponent();
- return vm.$nextTick();
+ await wrapper.vm.$nextTick();
});
afterEach(() => {
wrapper.destroy();
});
- it('calls submit if submit button is clicked', async () => {
- jest.spyOn(wrapper.vm, 'submit').mockImplementation();
- wrapper.setData({ title: 'Testing Title' });
-
- await vm.$nextTick();
- await submitIssue();
- expect(wrapper.vm.submit).toHaveBeenCalled();
- });
-
- it('disables submit button if title is empty', () => {
- expect(findSubmitButton().props().disabled).toBe(true);
+ it('renders board-new-item component', () => {
+ const boardNewItem = findBoardNewItem();
+ expect(boardNewItem.exists()).toBe(true);
+ expect(boardNewItem.props()).toEqual({
+ list: mockList,
+ formEventPrefix: 'toggle-issue-form-',
+ submitButtonTitle: 'Create issue',
+ disableSubmit: false,
+ });
});
- it('enables submit button if title is not empty', async () => {
- wrapper.setData({ title: 'Testing Title' });
-
- await vm.$nextTick();
- expect(wrapper.find({ ref: 'input' }).element.value).toBe('Testing Title');
- expect(findSubmitButton().props().disabled).toBe(false);
+ it('calls addListNewIssue action when `board-new-item` emits form-submit event', async () => {
+ findBoardNewItem().vm.$emit('form-submit', { title: 'Foo' });
+
+ await wrapper.vm.$nextTick();
+ expect(addListNewIssuesSpy).toHaveBeenCalledWith(expect.any(Object), {
+ list: mockList,
+ issueInput: {
+ title: 'Foo',
+ labelIds: [],
+ assigneeIds: [],
+ milestoneId: undefined,
+ projectPath: mockGroupProjects[0].fullPath,
+ },
+ });
});
- it('clears title after clicking cancel', async () => {
- findCancelButton().trigger('click');
+ it('emits event `toggle-issue-form` with current list Id suffix on eventHub when `board-new-item` emits form-cancel event', async () => {
+ jest.spyOn(eventHub, '$emit').mockImplementation();
+ findBoardNewItem().vm.$emit('form-cancel');
- await vm.$nextTick();
- expect(vm.title).toBe('');
+ await wrapper.vm.$nextTick();
+ expect(eventHub.$emit).toHaveBeenCalledWith(`toggle-issue-form-${mockList.id}`);
});
- describe('submit success', () => {
- it('creates new issue', async () => {
- wrapper.setData({ title: 'create issue' });
+ describe('when in group issue board', () => {
+ it('renders project-select component within board-new-item component', () => {
+ const projectSelect = findBoardNewItem().findComponent(ProjectSelect);
- await vm.$nextTick();
- await submitIssue();
- expect(addListNewIssuesSpy).toHaveBeenCalled();
+ expect(projectSelect.exists()).toBe(true);
+ expect(projectSelect.props('list')).toEqual(mockList);
});
+ });
- it('enables button after submit', async () => {
- jest.spyOn(wrapper.vm, 'submit').mockImplementation();
- wrapper.setData({ title: 'create issue' });
-
- await vm.$nextTick();
- await submitIssue();
- expect(findSubmitButton().props().disabled).toBe(false);
+ describe('when in project issue board', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ getters: { isGroupBoard: () => false, isProjectBoard: () => true },
+ });
});
- it('clears title after submit', async () => {
- wrapper.setData({ title: 'create issue' });
+ it('does not render project-select component within board-new-item component', () => {
+ const projectSelect = findBoardNewItem().findComponent(ProjectSelect);
- await vm.$nextTick();
- await submitIssue();
- await vm.$nextTick();
- expect(vm.title).toBe('');
+ expect(projectSelect.exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/boards/components/board_new_item_spec.js b/spec/frontend/boards/components/board_new_item_spec.js
new file mode 100644
index 00000000000..0151d9c1c14
--- /dev/null
+++ b/spec/frontend/boards/components/board_new_item_spec.js
@@ -0,0 +1,103 @@
+import { GlForm, GlFormInput, GlButton } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+
+import BoardNewItem from '~/boards/components/board_new_item.vue';
+import eventHub from '~/boards/eventhub';
+
+import { mockList } from '../mock_data';
+
+const createComponent = ({
+ list = mockList,
+ formEventPrefix = 'toggle-issue-form-',
+ disabledSubmit = false,
+ submitButtonTitle = 'Create item',
+} = {}) =>
+ mountExtended(BoardNewItem, {
+ propsData: {
+ list,
+ formEventPrefix,
+ disabledSubmit,
+ submitButtonTitle,
+ },
+ slots: {
+ default: '<div id="default-slot"></div>',
+ },
+ stubs: {
+ GlForm,
+ },
+ });
+
+describe('BoardNewItem', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ it('renders gl-form component', () => {
+ expect(wrapper.findComponent(GlForm).exists()).toBe(true);
+ });
+
+ it('renders field label', () => {
+ expect(wrapper.find('label').exists()).toBe(true);
+ expect(wrapper.find('label').text()).toBe('Title');
+ });
+
+ it('renders gl-form-input field', () => {
+ expect(wrapper.findComponent(GlFormInput).exists()).toBe(true);
+ });
+
+ it('renders default slot contents', () => {
+ expect(wrapper.find('#default-slot').exists()).toBe(true);
+ });
+
+ it('renders submit and cancel buttons', () => {
+ const buttons = wrapper.findAllComponents(GlButton);
+ expect(buttons).toHaveLength(2);
+ expect(buttons.at(0).text()).toBe('Create item');
+ expect(buttons.at(1).text()).toBe('Cancel');
+ });
+
+ describe('events', () => {
+ const glForm = () => wrapper.findComponent(GlForm);
+ const titleInput = () => wrapper.find('input[name="issue_title"]');
+
+ it('emits `form-submit` event with title value when `submit` is triggered on gl-form', async () => {
+ titleInput().setValue('Foo');
+ await glForm().trigger('submit');
+
+ expect(wrapper.emitted('form-submit')).toBeTruthy();
+ expect(wrapper.emitted('form-submit')[0]).toEqual([
+ {
+ title: 'Foo',
+ list: mockList,
+ },
+ ]);
+ });
+
+ it('emits `scroll-board-list-` event with list.id on eventHub when `submit` is triggered on gl-form', async () => {
+ jest.spyOn(eventHub, '$emit').mockImplementation();
+ await glForm().trigger('submit');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith(`scroll-board-list-${mockList.id}`);
+ });
+
+ it('emits `form-cancel` event and clears title value when `reset` is triggered on gl-form', async () => {
+ titleInput().setValue('Foo');
+
+ await wrapper.vm.$nextTick();
+ expect(titleInput().element.value).toBe('Foo');
+
+ await glForm().trigger('reset');
+
+ expect(titleInput().element.value).toBe('');
+ expect(wrapper.emitted('form-cancel')).toBeTruthy();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/issue_board_filtered_search_spec.js b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
index 0e3cf59901e..b6de46f8db8 100644
--- a/spec/frontend/boards/components/issue_board_filtered_search_spec.js
+++ b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
@@ -1,16 +1,16 @@
import { shallowMount } from '@vue/test-utils';
import BoardFilteredSearch from '~/boards/components/board_filtered_search.vue';
import IssueBoardFilteredSpec from '~/boards/components/issue_board_filtered_search.vue';
-import { BoardType } from '~/boards/constants';
import issueBoardFilters from '~/boards/issue_board_filters';
import { mockTokens } from '../mock_data';
+jest.mock('~/boards/issue_board_filters');
+
describe('IssueBoardFilter', () => {
let wrapper;
- const createComponent = ({ initialFilterParams = {} } = {}) => {
+ const createComponent = () => {
wrapper = shallowMount(IssueBoardFilteredSpec, {
- provide: { initialFilterParams },
props: { fullPath: '', boardType: '' },
});
};
@@ -20,7 +20,17 @@ describe('IssueBoardFilter', () => {
});
describe('default', () => {
+ let fetchAuthorsSpy;
+ let fetchLabelsSpy;
beforeEach(() => {
+ fetchAuthorsSpy = jest.fn();
+ fetchLabelsSpy = jest.fn();
+
+ issueBoardFilters.mockReturnValue({
+ fetchAuthors: fetchAuthorsSpy,
+ fetchLabels: fetchLabelsSpy,
+ });
+
createComponent();
});
@@ -28,17 +38,10 @@ describe('IssueBoardFilter', () => {
expect(wrapper.find(BoardFilteredSearch).exists()).toBe(true);
});
- it.each([[BoardType.group], [BoardType.project]])(
- 'when boardType is %s we pass the correct tokens to BoardFilteredSearch',
- (boardType) => {
- const { fetchAuthors, fetchLabels } = issueBoardFilters({}, '', boardType);
+ it('passes the correct tokens to BoardFilteredSearch', () => {
+ const tokens = mockTokens(fetchLabelsSpy, fetchAuthorsSpy, wrapper.vm.fetchMilestones);
- const tokens = mockTokens(fetchLabels, fetchAuthors);
-
- expect(wrapper.find(BoardFilteredSearch).props('tokens').toString()).toBe(
- tokens.toString(),
- );
- },
- );
+ expect(wrapper.find(BoardFilteredSearch).props('tokens')).toEqual(tokens);
+ });
});
});
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
index 8992a5780f3..60474767f2d 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
@@ -97,6 +97,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
addLabelIds: TEST_LABELS.map((label) => label.id),
projectPath: TEST_ISSUE_FULLPATH,
removeLabelIds: [],
+ iid: null,
});
});
});
@@ -121,6 +122,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
addLabelIds: [5, 7],
removeLabelIds: [6],
projectPath: TEST_ISSUE_FULLPATH,
+ iid: null,
});
});
});
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 6ac4db8cdaa..106f7b04c4b 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -1,5 +1,6 @@
/* global List */
+import { GlFilteredSearchToken } from '@gitlab/ui';
import { keyBy } from 'lodash';
import Vue from 'vue';
import '~/boards/models/list';
@@ -8,6 +9,8 @@ import boardsStore from '~/boards/stores/boards_store';
import { __ } from '~/locale';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
+import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
+import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue';
export const boardObj = {
id: 1,
@@ -101,6 +104,17 @@ export const mockMilestone = {
due_date: '2019-12-31',
};
+export const mockMilestones = [
+ {
+ id: 'gid://gitlab/Milestone/1',
+ title: 'Milestone 1',
+ },
+ {
+ id: 'gid://gitlab/Milestone/2',
+ title: 'Milestone 2',
+ },
+];
+
export const assignees = [
{
id: 'gid://gitlab/User/2',
@@ -531,7 +545,7 @@ export const mockMoveData = {
...mockMoveIssueParams,
};
-export const mockTokens = (fetchLabels, fetchAuthors) => [
+export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones) => [
{
icon: 'labels',
title: __('Label'),
@@ -557,6 +571,7 @@ export const mockTokens = (fetchLabels, fetchAuthors) => [
token: AuthorToken,
unique: true,
fetchAuthors,
+ preloadedAuthors: [],
},
{
icon: 'user',
@@ -569,5 +584,35 @@ export const mockTokens = (fetchLabels, fetchAuthors) => [
token: AuthorToken,
unique: true,
fetchAuthors,
+ preloadedAuthors: [],
+ },
+ {
+ icon: 'issues',
+ title: __('Type'),
+ type: 'types',
+ operators: [{ value: '=', description: 'is' }],
+ token: GlFilteredSearchToken,
+ unique: true,
+ options: [
+ { icon: 'issue-type-issue', value: 'ISSUE', title: 'Issue' },
+ { icon: 'issue-type-incident', value: 'INCIDENT', title: 'Incident' },
+ ],
+ },
+ {
+ icon: 'clock',
+ title: __('Milestone'),
+ symbol: '%',
+ type: 'milestone_title',
+ token: MilestoneToken,
+ unique: true,
+ defaultMilestones: [],
+ fetchMilestones,
+ },
+ {
+ icon: 'weight',
+ title: __('Weight'),
+ type: 'weight',
+ token: WeightToken,
+ unique: true,
},
];
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index 5e16e389ddc..1272a573d2f 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -1,4 +1,7 @@
import * as Sentry from '@sentry/browser';
+import { cloneDeep } from 'lodash';
+import Vue from 'vue';
+import Vuex from 'vuex';
import {
inactiveId,
ISSUABLE,
@@ -6,6 +9,7 @@ import {
issuableTypes,
BoardType,
listsQuery,
+ DraggableItemTypes,
} from 'ee_else_ce/boards/constants';
import issueMoveListMutation from 'ee_else_ce/boards/graphql/issue_move_list.mutation.graphql';
import testAction from 'helpers/vuex_action_helper';
@@ -21,6 +25,7 @@ import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutati
import issueCreateMutation from '~/boards/graphql/issue_create.mutation.graphql';
import actions, { gqlClient } from '~/boards/stores/actions';
import * as types from '~/boards/stores/mutation_types';
+import mutations from '~/boards/stores/mutations';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import {
@@ -37,6 +42,7 @@ import {
mockMoveState,
mockMoveData,
mockList,
+ mockMilestones,
} from '../mock_data';
jest.mock('~/flash');
@@ -45,6 +51,8 @@ jest.mock('~/flash');
// subgroups when the movIssue action is called.
const getProjectPath = (path) => path.split('#')[0];
+Vue.use(Vuex);
+
beforeEach(() => {
window.gon = { features: {} };
});
@@ -260,6 +268,87 @@ describe('fetchLists', () => {
);
});
+describe('fetchMilestones', () => {
+ const queryResponse = {
+ data: {
+ project: {
+ milestones: {
+ nodes: mockMilestones,
+ },
+ },
+ },
+ };
+
+ const queryErrors = {
+ data: {
+ project: {
+ errors: ['You cannot view these milestones'],
+ milestones: {},
+ },
+ },
+ };
+
+ function createStore({
+ state = {
+ boardType: 'project',
+ fullPath: 'gitlab-org/gitlab',
+ milestones: [],
+ milestonesLoading: false,
+ },
+ } = {}) {
+ return new Vuex.Store({
+ state,
+ mutations,
+ });
+ }
+
+ it('throws error if state.boardType is not group or project', () => {
+ const store = createStore({
+ state: {
+ boardType: 'invalid',
+ },
+ });
+
+ expect(() => actions.fetchMilestones(store)).toThrow(new Error('Unknown board type'));
+ });
+
+ it('sets milestonesLoading to true', async () => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ const store = createStore();
+
+ actions.fetchMilestones(store);
+
+ expect(store.state.milestonesLoading).toBe(true);
+ });
+
+ describe('success', () => {
+ it('sets state.milestones from query result', async () => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ const store = createStore();
+
+ await actions.fetchMilestones(store);
+
+ expect(store.state.milestonesLoading).toBe(false);
+ expect(store.state.milestones).toBe(mockMilestones);
+ });
+ });
+
+ describe('failure', () => {
+ it('sets state.milestones from query result', async () => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryErrors);
+
+ const store = createStore();
+
+ await expect(actions.fetchMilestones(store)).rejects.toThrow();
+
+ expect(store.state.milestonesLoading).toBe(false);
+ expect(store.state.error).toBe('Failed to load milestones.');
+ });
+ });
+});
+
describe('createList', () => {
it('should dispatch createIssueList action', () => {
testAction({
@@ -419,75 +508,114 @@ describe('fetchLabels', () => {
});
describe('moveList', () => {
- it('should commit MOVE_LIST mutation and dispatch updateList action', (done) => {
- const initialBoardListsState = {
- 'gid://gitlab/List/1': mockLists[0],
- 'gid://gitlab/List/2': mockLists[1],
- };
+ const backlogListId = 'gid://1';
+ const closedListId = 'gid://5';
- const state = {
- fullPath: 'gitlab-org',
- fullBoardId: 'gid://gitlab/Board/1',
- boardType: 'group',
- disabled: false,
- boardLists: initialBoardListsState,
- };
+ const boardLists1 = {
+ 'gid://3': { listType: '', position: 0 },
+ 'gid://4': { listType: '', position: 1 },
+ 'gid://5': { listType: '', position: 2 },
+ };
- testAction(
- actions.moveList,
- {
- listId: 'gid://gitlab/List/1',
- replacedListId: 'gid://gitlab/List/2',
- newIndex: 1,
- adjustmentValue: 1,
- },
- state,
- [
- {
- type: types.MOVE_LIST,
- payload: { movedList: mockLists[0], listAtNewIndex: mockLists[1] },
- },
- ],
- [
- {
- type: 'updateList',
- payload: {
- listId: 'gid://gitlab/List/1',
- position: 0,
- backupList: initialBoardListsState,
- },
+ const boardLists2 = {
+ [backlogListId]: { listType: ListType.backlog, position: -Infinity },
+ [closedListId]: { listType: ListType.closed, position: Infinity },
+ ...cloneDeep(boardLists1),
+ };
+
+ const movableListsOrder = ['gid://3', 'gid://4', 'gid://5'];
+ const allListsOrder = [backlogListId, ...movableListsOrder, closedListId];
+
+ it(`should not handle the event if the dragged item is not a "${DraggableItemTypes.list}"`, () => {
+ return testAction({
+ action: actions.moveList,
+ payload: {
+ item: { dataset: { listId: '', draggableItemType: DraggableItemTypes.card } },
+ to: {
+ children: [],
},
- ],
- done,
- );
+ },
+ state: {},
+ expectedMutations: [],
+ expectedActions: [],
+ });
});
- it('should not commit MOVE_LIST or dispatch updateList if listId and replacedListId are the same', () => {
- const initialBoardListsState = {
- 'gid://gitlab/List/1': mockLists[0],
- 'gid://gitlab/List/2': mockLists[1],
- };
+ describe.each`
+ draggableFrom | draggableTo | boardLists | boardListsOrder | expectedMovableListsOrder
+ ${0} | ${2} | ${boardLists1} | ${movableListsOrder} | ${['gid://4', 'gid://5', 'gid://3']}
+ ${2} | ${0} | ${boardLists1} | ${movableListsOrder} | ${['gid://5', 'gid://3', 'gid://4']}
+ ${0} | ${1} | ${boardLists1} | ${movableListsOrder} | ${['gid://4', 'gid://3', 'gid://5']}
+ ${1} | ${2} | ${boardLists1} | ${movableListsOrder} | ${['gid://3', 'gid://5', 'gid://4']}
+ ${2} | ${1} | ${boardLists1} | ${movableListsOrder} | ${['gid://3', 'gid://5', 'gid://4']}
+ ${1} | ${3} | ${boardLists2} | ${allListsOrder} | ${['gid://4', 'gid://5', 'gid://3']}
+ ${3} | ${1} | ${boardLists2} | ${allListsOrder} | ${['gid://5', 'gid://3', 'gid://4']}
+ ${1} | ${2} | ${boardLists2} | ${allListsOrder} | ${['gid://4', 'gid://3', 'gid://5']}
+ ${2} | ${3} | ${boardLists2} | ${allListsOrder} | ${['gid://3', 'gid://5', 'gid://4']}
+ ${3} | ${2} | ${boardLists2} | ${allListsOrder} | ${['gid://3', 'gid://5', 'gid://4']}
+ `(
+ 'when moving a list from position $draggableFrom to $draggableTo with lists $boardListsOrder',
+ ({ draggableFrom, draggableTo, boardLists, boardListsOrder, expectedMovableListsOrder }) => {
+ const movedListId = boardListsOrder[draggableFrom];
+ const displacedListId = boardListsOrder[draggableTo];
+ const buildDraggablePayload = () => {
+ return {
+ item: {
+ dataset: {
+ listId: boardListsOrder[draggableFrom],
+ draggableItemType: DraggableItemTypes.list,
+ },
+ },
+ newIndex: draggableTo,
+ to: {
+ children: boardListsOrder.map((listId) => ({ dataset: { listId } })),
+ },
+ };
+ };
- const state = {
- fullPath: 'gitlab-org',
- fullBoardId: 'gid://gitlab/Board/1',
- boardType: 'group',
- disabled: false,
- boardLists: initialBoardListsState,
- };
+ it('should commit MOVE_LIST mutations and dispatch updateList action with correct payloads', () => {
+ return testAction({
+ action: actions.moveList,
+ payload: buildDraggablePayload(),
+ state: { boardLists },
+ expectedMutations: [
+ {
+ type: types.MOVE_LISTS,
+ payload: expectedMovableListsOrder.map((listId, i) => ({ listId, position: i })),
+ },
+ ],
+ expectedActions: [
+ {
+ type: 'updateList',
+ payload: {
+ listId: movedListId,
+ position: movableListsOrder.findIndex((i) => i === displacedListId),
+ },
+ },
+ ],
+ });
+ });
+ },
+ );
- testAction(
- actions.moveList,
- {
- listId: 'gid://gitlab/List/1',
- replacedListId: 'gid://gitlab/List/1',
- newIndex: 1,
- adjustmentValue: 1,
- },
- state,
- [],
- [],
- );
+ describe('when moving from and to the same position', () => {
+    it('should not commit MOVE_LISTS and should not dispatch updateList', () => {
+ const listId = 'gid://1000';
+
+ return testAction({
+ action: actions.moveList,
+ payload: {
+          item: { dataset: { listId, draggableItemType: DraggableItemTypes.list } },
+ newIndex: 0,
+ to: {
+ children: [{ dataset: { listId } }],
+ },
+ },
+ state: { boardLists: { [listId]: { position: 0 } } },
+ expectedMutations: [],
+ expectedActions: [],
+ });
+ });
});
});
@@ -549,7 +677,7 @@ describe('updateList', () => {
});
});
- it('should commit UPDATE_LIST_FAILURE mutation when API returns an error', (done) => {
+ it('should dispatch handleUpdateListFailure when API returns an error', () => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
updateBoardList: {
@@ -559,17 +687,31 @@ describe('updateList', () => {
},
});
- testAction(
+ return testAction(
actions.updateList,
{ listId: 'gid://gitlab/List/1', position: 1 },
createState(),
- [{ type: types.UPDATE_LIST_FAILURE }],
[],
- done,
+ [{ type: 'handleUpdateListFailure' }],
);
});
});
+describe('handleUpdateListFailure', () => {
+ it('should dispatch fetchLists action and commit SET_ERROR mutation', async () => {
+ await testAction({
+ action: actions.handleUpdateListFailure,
+ expectedMutations: [
+ {
+ type: types.SET_ERROR,
+ payload: 'An error occurred while updating the board list. Please try again.',
+ },
+ ],
+ expectedActions: [{ type: 'fetchLists' }],
+ });
+ });
+});
+
describe('toggleListCollapsed', () => {
it('should commit TOGGLE_LIST_COLLAPSED mutation', async () => {
const payload = { listId: 'gid://gitlab/List/1', collapsed: true };
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index 37f0969a39a..a2ba1e9eb5e 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -165,40 +165,26 @@ describe('Board Store Mutations', () => {
});
});
- describe('MOVE_LIST', () => {
- it('updates boardLists state with reordered lists', () => {
+ describe('MOVE_LISTS', () => {
+ it('updates the positions of board lists', () => {
state = {
...state,
boardLists: initialBoardListsState,
};
- mutations.MOVE_LIST(state, {
- movedList: mockLists[0],
- listAtNewIndex: mockLists[1],
- });
-
- expect(state.boardLists).toEqual({
- 'gid://gitlab/List/2': mockLists[1],
- 'gid://gitlab/List/1': mockLists[0],
- });
- });
- });
-
- describe('UPDATE_LIST_FAILURE', () => {
- it('updates boardLists state with previous order and sets error message', () => {
- state = {
- ...state,
- boardLists: {
- 'gid://gitlab/List/2': mockLists[1],
- 'gid://gitlab/List/1': mockLists[0],
+ mutations.MOVE_LISTS(state, [
+ {
+ listId: mockLists[0].id,
+ position: 1,
},
- error: undefined,
- };
-
- mutations.UPDATE_LIST_FAILURE(state, initialBoardListsState);
+ {
+ listId: mockLists[1].id,
+ position: 0,
+ },
+ ]);
- expect(state.boardLists).toEqual(initialBoardListsState);
- expect(state.error).toEqual('An error occurred while updating the list. Please try again.');
+ expect(state.boardLists[mockLists[0].id].position).toBe(1);
+ expect(state.boardLists[mockLists[1].id].position).toBe(0);
});
});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
index eb18147fcef..5c7404c1175 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -123,6 +123,29 @@ describe('Ci variable modal', () => {
});
});
+ describe.each`
+ value | secret | rendered
+ ${'value'} | ${'secret_value'} | ${false}
+ ${'dollar$ign'} | ${'dollar$ign'} | ${true}
+ `('Adding a new variable', ({ value, secret, rendered }) => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ const invalidKeyVariable = {
+ ...variable,
+ key: 'key',
+ value,
+ secret_value: secret,
+ };
+ createComponent(mount);
+ store.state.variable = invalidKeyVariable;
+ });
+
+ it(`${rendered ? 'renders' : 'does not render'} the variable reference warning`, () => {
+ const warning = wrapper.find(`[data-testid='contains-variable-reference']`);
+ expect(warning.exists()).toBe(rendered);
+ });
+ });
+
describe('Editing a variable', () => {
beforeEach(() => {
const [variable] = mockData.mockVariables;
diff --git a/spec/frontend/clusters/clusters_bundle_spec.js b/spec/frontend/clusters/clusters_bundle_spec.js
index 42990334f0a..2a0610b1b0a 100644
--- a/spec/frontend/clusters/clusters_bundle_spec.js
+++ b/spec/frontend/clusters/clusters_bundle_spec.js
@@ -1,5 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import { loadHTMLFixture } from 'helpers/fixtures';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import { setTestTimeout } from 'helpers/timeout';
import Clusters from '~/clusters/clusters_bundle';
import axios from '~/lib/utils/axios_utils';
@@ -8,6 +9,8 @@ import initProjectSelectDropdown from '~/project_select';
jest.mock('~/lib/utils/poll');
jest.mock('~/project_select');
+useMockLocationHelper();
+
describe('Clusters', () => {
setTestTimeout(1000);
@@ -55,20 +58,6 @@ describe('Clusters', () => {
});
describe('updateContainer', () => {
- const { location } = window;
-
- beforeEach(() => {
- delete window.location;
- window.location = {
- reload: jest.fn(),
- hash: location.hash,
- };
- });
-
- afterEach(() => {
- window.location = location;
- });
-
describe('when creating cluster', () => {
it('should show the creating container', () => {
cluster.updateContainer(null, 'creating');
diff --git a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
new file mode 100644
index 00000000000..1a2e188e7ae
--- /dev/null
+++ b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
@@ -0,0 +1,50 @@
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import CommitBoxPipelineMiniGraph from '~/projects/commit_box/info/components/commit_box_pipeline_mini_graph.vue';
+import { mockStages } from './mock_data';
+
+describe('Commit box pipeline mini graph', () => {
+ let wrapper;
+
+ const findMiniGraph = () => wrapper.findByTestId('commit-box-mini-graph');
+ const findUpstream = () => wrapper.findByTestId('commit-box-mini-graph-upstream');
+ const findDownstream = () => wrapper.findByTestId('commit-box-mini-graph-downstream');
+
+ const createComponent = () => {
+ wrapper = extendedWrapper(
+ shallowMount(CommitBoxPipelineMiniGraph, {
+ propsData: {
+ stages: mockStages,
+ },
+ mocks: {
+ $apollo: {
+ queries: {
+ pipeline: {
+ loading: false,
+ },
+ },
+ },
+ },
+ }),
+ );
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('linked pipelines', () => {
+    it('should display the mini pipeline graph', () => {
+ expect(findMiniGraph().exists()).toBe(true);
+ });
+
+ it('should not display linked pipelines', () => {
+ expect(findUpstream().exists()).toBe(false);
+ expect(findDownstream().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/commit/mock_data.js b/spec/frontend/commit/mock_data.js
new file mode 100644
index 00000000000..ef018a4fbd7
--- /dev/null
+++ b/spec/frontend/commit/mock_data.js
@@ -0,0 +1,117 @@
+export const mockStages = [
+ {
+ name: 'build',
+ title: 'build: passed',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/root/ci-project/-/pipelines/611#build',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/root/ci-project/-/pipelines/611#build',
+ dropdown_path: '/root/ci-project/-/pipelines/611/stage.json?stage=build',
+ },
+ {
+ name: 'test',
+ title: 'test: passed',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/root/ci-project/-/pipelines/611#test',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/root/ci-project/-/pipelines/611#test',
+ dropdown_path: '/root/ci-project/-/pipelines/611/stage.json?stage=test',
+ },
+ {
+ name: 'test_two',
+ title: 'test_two: passed',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/root/ci-project/-/pipelines/611#test_two',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/root/ci-project/-/pipelines/611#test_two',
+ dropdown_path: '/root/ci-project/-/pipelines/611/stage.json?stage=test_two',
+ },
+ {
+ name: 'manual',
+ title: 'manual: skipped',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/root/ci-project/-/pipelines/611#manual',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ action: {
+ icon: 'play',
+ title: 'Play all manual',
+ path: '/root/ci-project/-/pipelines/611/stages/manual/play_manual',
+ method: 'post',
+ button_title: 'Play all manual',
+ },
+ },
+ path: '/root/ci-project/-/pipelines/611#manual',
+ dropdown_path: '/root/ci-project/-/pipelines/611/stage.json?stage=manual',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: passed',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/root/ci-project/-/pipelines/611#deploy',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/root/ci-project/-/pipelines/611#deploy',
+ dropdown_path: '/root/ci-project/-/pipelines/611/stage.json?stage=deploy',
+ },
+ {
+ name: 'qa',
+ title: 'qa: passed',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/root/ci-project/-/pipelines/611#qa',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/root/ci-project/-/pipelines/611#qa',
+ dropdown_path: '/root/ci-project/-/pipelines/611/stage.json?stage=qa',
+ },
+];
diff --git a/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap b/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
index 35c02911e27..e508cddd6f9 100644
--- a/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
+++ b/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`content_editor/components/toolbar_button displays tertiary, small button with a provided label and icon 1`] = `
-"<b-button-stub size=\\"sm\\" variant=\\"default\\" type=\\"button\\" tag=\\"button\\" aria-label=\\"Bold\\" title=\\"Bold\\" class=\\"gl-mx-2 gl-button btn-default-tertiary btn-icon\\">
+"<b-button-stub size=\\"sm\\" variant=\\"default\\" type=\\"button\\" tag=\\"button\\" aria-label=\\"Bold\\" title=\\"Bold\\" class=\\"gl-button btn-default-tertiary btn-icon\\">
<!---->
<gl-icon-stub name=\\"bold\\" size=\\"16\\" class=\\"gl-button-icon\\"></gl-icon-stub>
<!---->
diff --git a/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap b/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap
index e56c37b0dc9..3c88c05a4b4 100644
--- a/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap
+++ b/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap
@@ -26,8 +26,21 @@ exports[`content_editor/components/toolbar_link_button renders dropdown componen
</div>
</form>
</li>
- <!---->
- <!---->
+ <li role=\\"presentation\\" class=\\"gl-new-dropdown-divider\\">
+ <hr role=\\"separator\\" aria-orientation=\\"horizontal\\" class=\\"dropdown-divider\\">
+ </li>
+ <li role=\\"presentation\\" class=\\"gl-new-dropdown-item\\"><button role=\\"menuitem\\" type=\\"button\\" class=\\"dropdown-item\\">
+ <!---->
+ <!---->
+ <!---->
+ <div class=\\"gl-new-dropdown-item-text-wrapper\\">
+ <p class=\\"gl-new-dropdown-item-text-primary\\">
+ Upload file
+ </p>
+ <!---->
+ </div>
+ <!---->
+ </button></li> <input type=\\"file\\" name=\\"content_editor_attachment\\" class=\\"gl-display-none\\">
</div>
<!---->
</div>
diff --git a/spec/frontend/content_editor/components/content_editor_error_spec.js b/spec/frontend/content_editor/components/content_editor_error_spec.js
new file mode 100644
index 00000000000..8723fb5a338
--- /dev/null
+++ b/spec/frontend/content_editor/components/content_editor_error_spec.js
@@ -0,0 +1,54 @@
+import { GlAlert } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ContentEditorError from '~/content_editor/components/content_editor_error.vue';
+import EditorStateObserver from '~/content_editor/components/editor_state_observer.vue';
+import { createTestEditor, emitEditorEvent } from '../test_utils';
+
+describe('content_editor/components/content_editor_error', () => {
+ let wrapper;
+ let tiptapEditor;
+
+ const findErrorAlert = () => wrapper.findComponent(GlAlert);
+
+ const createWrapper = async () => {
+ tiptapEditor = createTestEditor();
+
+ wrapper = shallowMountExtended(ContentEditorError, {
+ provide: {
+ tiptapEditor,
+ },
+ stubs: {
+ EditorStateObserver,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders error when content editor emits an error event', async () => {
+ const error = 'error message';
+
+ createWrapper();
+
+ await emitEditorEvent({ tiptapEditor, event: 'error', params: { error } });
+
+ expect(findErrorAlert().text()).toBe(error);
+ });
+
+ it('allows dismissing the error', async () => {
+ const error = 'error message';
+
+ createWrapper();
+
+ await emitEditorEvent({ tiptapEditor, event: 'error', params: { error } });
+
+ findErrorAlert().vm.$emit('dismiss');
+
+ await nextTick();
+
+ expect(findErrorAlert().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/content_editor/components/content_editor_spec.js b/spec/frontend/content_editor/components/content_editor_spec.js
index 563e80e04c1..d516baf6f0f 100644
--- a/spec/frontend/content_editor/components/content_editor_spec.js
+++ b/spec/frontend/content_editor/components/content_editor_spec.js
@@ -1,91 +1,175 @@
-import { GlAlert } from '@gitlab/ui';
+import { GlLoadingIcon } from '@gitlab/ui';
import { EditorContent } from '@tiptap/vue-2';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ContentEditor from '~/content_editor/components/content_editor.vue';
+import ContentEditorError from '~/content_editor/components/content_editor_error.vue';
+import ContentEditorProvider from '~/content_editor/components/content_editor_provider.vue';
+import EditorStateObserver from '~/content_editor/components/editor_state_observer.vue';
import TopToolbar from '~/content_editor/components/top_toolbar.vue';
-import { createContentEditor } from '~/content_editor/services/create_content_editor';
+import {
+ LOADING_CONTENT_EVENT,
+ LOADING_SUCCESS_EVENT,
+ LOADING_ERROR_EVENT,
+} from '~/content_editor/constants';
+import { emitEditorEvent } from '../test_utils';
+
+jest.mock('~/emoji');
describe('ContentEditor', () => {
let wrapper;
- let editor;
+ let contentEditor;
+ let renderMarkdown;
+ const uploadsPath = '/uploads';
const findEditorElement = () => wrapper.findByTestId('content-editor');
- const findErrorAlert = () => wrapper.findComponent(GlAlert);
+ const findEditorContent = () => wrapper.findComponent(EditorContent);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ const createWrapper = (propsData = {}) => {
+ renderMarkdown = jest.fn();
- const createWrapper = async (contentEditor) => {
wrapper = shallowMountExtended(ContentEditor, {
propsData: {
- contentEditor,
+ renderMarkdown,
+ uploadsPath,
+ ...propsData,
+ },
+ stubs: {
+ EditorStateObserver,
+ ContentEditorProvider,
+ },
+ listeners: {
+ initialized(editor) {
+ contentEditor = editor;
+ },
},
});
};
- beforeEach(() => {
- editor = createContentEditor({ renderMarkdown: () => true });
- });
-
afterEach(() => {
wrapper.destroy();
});
- it('renders editor content component and attaches editor instance', () => {
- createWrapper(editor);
+ it('triggers initialized event and provides contentEditor instance as event data', () => {
+ createWrapper();
- const editorContent = wrapper.findComponent(EditorContent);
+ expect(contentEditor).not.toBeFalsy();
+ });
+
+ it('renders EditorContent component and provides tiptapEditor instance', () => {
+ createWrapper();
+
+ const editorContent = findEditorContent();
- expect(editorContent.props().editor).toBe(editor.tiptapEditor);
+ expect(editorContent.props().editor).toBe(contentEditor.tiptapEditor);
expect(editorContent.classes()).toContain('md');
});
- it('renders top toolbar component and attaches editor instance', () => {
- createWrapper(editor);
+ it('renders ContentEditorProvider component', () => {
+ createWrapper();
- expect(wrapper.findComponent(TopToolbar).props().contentEditor).toBe(editor);
+ expect(wrapper.findComponent(ContentEditorProvider).exists()).toBe(true);
});
- it.each`
- isFocused | classes
- ${true} | ${['md-area', 'is-focused']}
- ${false} | ${['md-area']}
- `(
- 'has $classes class selectors when tiptapEditor.isFocused = $isFocused',
- ({ isFocused, classes }) => {
- editor.tiptapEditor.isFocused = isFocused;
- createWrapper(editor);
+ it('renders top toolbar component', () => {
+ createWrapper();
+
+ expect(wrapper.findComponent(TopToolbar).exists()).toBe(true);
+ });
- expect(findEditorElement().classes()).toStrictEqual(classes);
- },
- );
+ it('adds is-focused class when focus event is emitted', async () => {
+ createWrapper();
- it('adds isFocused class when tiptapEditor is focused', () => {
- editor.tiptapEditor.isFocused = true;
- createWrapper(editor);
+ await emitEditorEvent({ tiptapEditor: contentEditor.tiptapEditor, event: 'focus' });
expect(findEditorElement().classes()).toContain('is-focused');
});
- describe('displaying error', () => {
- const error = 'Content Editor error';
+ it('removes is-focused class when blur event is emitted', async () => {
+ createWrapper();
+
+ await emitEditorEvent({ tiptapEditor: contentEditor.tiptapEditor, event: 'focus' });
+ await emitEditorEvent({ tiptapEditor: contentEditor.tiptapEditor, event: 'blur' });
+
+ expect(findEditorElement().classes()).not.toContain('is-focused');
+ });
+
+ it('emits change event when document is updated', async () => {
+ createWrapper();
+
+ await emitEditorEvent({ tiptapEditor: contentEditor.tiptapEditor, event: 'update' });
+
+ expect(wrapper.emitted('change')).toEqual([
+ [
+ {
+ empty: contentEditor.empty,
+ },
+ ],
+ ]);
+ });
+
+ it('renders content_editor_error component', () => {
+ createWrapper();
+
+ expect(wrapper.findComponent(ContentEditorError).exists()).toBe(true);
+ });
+ describe('when loading content', () => {
beforeEach(async () => {
- createWrapper(editor);
+ createWrapper();
- editor.tiptapEditor.emit('error', error);
+ contentEditor.emit(LOADING_CONTENT_EVENT);
await nextTick();
});
- it('displays error notifications from the tiptap editor', () => {
- expect(findErrorAlert().text()).toBe(error);
+ it('displays loading indicator', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
});
- it('allows dismissing an error alert', async () => {
- findErrorAlert().vm.$emit('dismiss');
+ it('hides EditorContent component', () => {
+ expect(findEditorContent().exists()).toBe(false);
+ });
+ });
+
+ describe('when loading content succeeds', () => {
+ beforeEach(async () => {
+ createWrapper();
+
+ contentEditor.emit(LOADING_CONTENT_EVENT);
+ await nextTick();
+ contentEditor.emit(LOADING_SUCCESS_EVENT);
+ await nextTick();
+ });
+
+ it('hides loading indicator', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ it('displays EditorContent component', () => {
+ expect(findEditorContent().exists()).toBe(true);
+ });
+ });
+
+ describe('when loading content fails', () => {
+ const error = 'error';
+
+ beforeEach(async () => {
+ createWrapper();
+
+ contentEditor.emit(LOADING_CONTENT_EVENT);
+ await nextTick();
+ contentEditor.emit(LOADING_ERROR_EVENT, error);
await nextTick();
+ });
+
+ it('hides loading indicator', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
- expect(findErrorAlert().exists()).toBe(false);
+ it('displays EditorContent component', () => {
+ expect(findEditorContent().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/content_editor/components/editor_state_observer_spec.js b/spec/frontend/content_editor/components/editor_state_observer_spec.js
new file mode 100644
index 00000000000..5e4bb348e1f
--- /dev/null
+++ b/spec/frontend/content_editor/components/editor_state_observer_spec.js
@@ -0,0 +1,75 @@
+import { shallowMount } from '@vue/test-utils';
+import { each } from 'lodash';
+import EditorStateObserver, {
+ tiptapToComponentMap,
+} from '~/content_editor/components/editor_state_observer.vue';
+import { createTestEditor } from '../test_utils';
+
+describe('content_editor/components/editor_state_observer', () => {
+ let tiptapEditor;
+ let wrapper;
+ let onDocUpdateListener;
+ let onSelectionUpdateListener;
+ let onTransactionListener;
+
+ const buildEditor = () => {
+ tiptapEditor = createTestEditor();
+ jest.spyOn(tiptapEditor, 'on');
+ };
+
+ const buildWrapper = () => {
+ wrapper = shallowMount(EditorStateObserver, {
+ provide: { tiptapEditor },
+ listeners: {
+ docUpdate: onDocUpdateListener,
+ selectionUpdate: onSelectionUpdateListener,
+ transaction: onTransactionListener,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ onDocUpdateListener = jest.fn();
+ onSelectionUpdateListener = jest.fn();
+ onTransactionListener = jest.fn();
+ buildEditor();
+ buildWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when editor content changes', () => {
+ it('emits update, selectionUpdate, and transaction events', () => {
+ const content = '<p>My paragraph</p>';
+
+ tiptapEditor.commands.insertContent(content);
+
+ expect(onDocUpdateListener).toHaveBeenCalledWith(
+ expect.objectContaining({ editor: tiptapEditor }),
+ );
+ expect(onSelectionUpdateListener).toHaveBeenCalledWith(
+ expect.objectContaining({ editor: tiptapEditor }),
+ );
+ expect(onSelectionUpdateListener).toHaveBeenCalledWith(
+ expect.objectContaining({ editor: tiptapEditor }),
+ );
+ });
+ });
+
+ describe('when component is destroyed', () => {
+ it('removes onTiptapDocUpdate and onTiptapSelectionUpdate hooks', () => {
+ jest.spyOn(tiptapEditor, 'off');
+
+ wrapper.destroy();
+
+ each(tiptapToComponentMap, (_, tiptapEvent) => {
+ expect(tiptapEditor.off).toHaveBeenCalledWith(
+ tiptapEvent,
+ tiptapEditor.on.mock.calls.find(([eventName]) => eventName === tiptapEvent)[1],
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/components/formatting_bubble_menu_spec.js b/spec/frontend/content_editor/components/formatting_bubble_menu_spec.js
new file mode 100644
index 00000000000..e44a7fa4ddb
--- /dev/null
+++ b/spec/frontend/content_editor/components/formatting_bubble_menu_spec.js
@@ -0,0 +1,80 @@
+import { BubbleMenu } from '@tiptap/vue-2';
+import { mockTracking } from 'helpers/tracking_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import FormattingBubbleMenu from '~/content_editor/components/formatting_bubble_menu.vue';
+
+import {
+ BUBBLE_MENU_TRACKING_ACTION,
+ CONTENT_EDITOR_TRACKING_LABEL,
+} from '~/content_editor/constants';
+import { createTestEditor } from '../test_utils';
+
+describe('content_editor/components/formatting_bubble_menu', () => {
+ let wrapper;
+ let trackingSpy;
+ let tiptapEditor;
+
+ const buildEditor = () => {
+ tiptapEditor = createTestEditor();
+
+ jest.spyOn(tiptapEditor, 'isActive');
+ };
+
+ const buildWrapper = () => {
+ wrapper = shallowMountExtended(FormattingBubbleMenu, {
+ provide: {
+ tiptapEditor,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ trackingSpy = mockTracking(undefined, null, jest.spyOn);
+ buildEditor();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders bubble menu component', () => {
+ buildWrapper();
+ const bubbleMenu = wrapper.findComponent(BubbleMenu);
+
+ expect(bubbleMenu.props().editor).toBe(tiptapEditor);
+ expect(bubbleMenu.classes()).toEqual(['gl-shadow', 'gl-rounded-base']);
+ });
+
+ describe.each`
+ testId | controlProps
+ ${'bold'} | ${{ contentType: 'bold', iconName: 'bold', label: 'Bold text', editorCommand: 'toggleBold', size: 'medium', category: 'primary' }}
+ ${'italic'} | ${{ contentType: 'italic', iconName: 'italic', label: 'Italic text', editorCommand: 'toggleItalic', size: 'medium', category: 'primary' }}
+ ${'strike'} | ${{ contentType: 'strike', iconName: 'strikethrough', label: 'Strikethrough', editorCommand: 'toggleStrike', size: 'medium', category: 'primary' }}
+ ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode', size: 'medium', category: 'primary' }}
+ `('given a $testId toolbar control', ({ testId, controlProps }) => {
+ beforeEach(() => {
+ buildWrapper();
+ });
+
+ it('renders the toolbar control with the provided properties', () => {
+ expect(wrapper.findByTestId(testId).exists()).toBe(true);
+
+ Object.keys(controlProps).forEach((propName) => {
+ expect(wrapper.findByTestId(testId).props(propName)).toBe(controlProps[propName]);
+ });
+ });
+
+ it('tracks the execution of toolbar controls', () => {
+ const eventData = { contentType: 'italic', value: 1 };
+ const { contentType, value } = eventData;
+
+ wrapper.findByTestId(testId).vm.$emit('execute', eventData);
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, BUBBLE_MENU_TRACKING_ACTION, {
+ label: CONTENT_EDITOR_TRACKING_LABEL,
+ property: contentType,
+ value,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/components/toolbar_button_spec.js b/spec/frontend/content_editor/components/toolbar_button_spec.js
index d848adcbff8..60263c46bdd 100644
--- a/spec/frontend/content_editor/components/toolbar_button_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_button_spec.js
@@ -1,7 +1,8 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import EditorStateObserver from '~/content_editor/components/editor_state_observer.vue';
import ToolbarButton from '~/content_editor/components/toolbar_button.vue';
-import { createTestEditor, mockChainedCommands } from '../test_utils';
+import { createTestEditor, mockChainedCommands, emitEditorEvent } from '../test_utils';
describe('content_editor/components/toolbar_button', () => {
let wrapper;
@@ -20,9 +21,12 @@ describe('content_editor/components/toolbar_button', () => {
wrapper = shallowMount(ToolbarButton, {
stubs: {
GlButton,
+ EditorStateObserver,
},
- propsData: {
+ provide: {
tiptapEditor,
+ },
+ propsData: {
contentType: CONTENT_TYPE,
iconName: ICON_NAME,
label: LABEL,
@@ -46,19 +50,43 @@ describe('content_editor/components/toolbar_button', () => {
expect(findButton().html()).toMatchSnapshot();
});
+ it('allows customizing the variant, category, size of the button', () => {
+ const variant = 'danger';
+ const category = 'secondary';
+ const size = 'medium';
+
+ buildWrapper({
+ variant,
+ category,
+ size,
+ });
+
+ expect(findButton().props()).toMatchObject({
+ variant,
+ category,
+ size,
+ });
+ });
+
it.each`
editorState | outcomeDescription | outcome
${{ isActive: true, isFocused: true }} | ${'button is active'} | ${true}
${{ isActive: false, isFocused: true }} | ${'button is not active'} | ${false}
${{ isActive: true, isFocused: false }} | ${'button is not active '} | ${false}
- `('$outcomeDescription when when editor state is $editorState', ({ editorState, outcome }) => {
- tiptapEditor.isActive.mockReturnValueOnce(editorState.isActive);
- tiptapEditor.isFocused = editorState.isFocused;
- buildWrapper();
+ `(
+ '$outcomeDescription when editor state is $editorState',
+ async ({ editorState, outcome }) => {
+ tiptapEditor.isActive.mockReturnValueOnce(editorState.isActive);
+ tiptapEditor.isFocused = editorState.isFocused;
- expect(findButton().classes().includes('active')).toBe(outcome);
- expect(tiptapEditor.isActive).toHaveBeenCalledWith(CONTENT_TYPE);
- });
+ buildWrapper();
+
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
+
+ expect(findButton().classes().includes('active')).toBe(outcome);
+ expect(tiptapEditor.isActive).toHaveBeenCalledWith(CONTENT_TYPE);
+ },
+ );
describe('when button is clicked', () => {
it('executes the content type command when executeCommand = true', async () => {
diff --git a/spec/frontend/content_editor/components/toolbar_image_button_spec.js b/spec/frontend/content_editor/components/toolbar_image_button_spec.js
index 701dcf83476..dab7e67d7c5 100644
--- a/spec/frontend/content_editor/components/toolbar_image_button_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_image_button_spec.js
@@ -1,7 +1,8 @@
import { GlButton, GlFormInputGroup } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import ToolbarImageButton from '~/content_editor/components/toolbar_image_button.vue';
-import { configure as configureImageExtension } from '~/content_editor/extensions/image';
+import Attachment from '~/content_editor/extensions/attachment';
+import Image from '~/content_editor/extensions/image';
import { createTestEditor, mockChainedCommands } from '../test_utils';
describe('content_editor/components/toolbar_image_button', () => {
@@ -10,7 +11,7 @@ describe('content_editor/components/toolbar_image_button', () => {
const buildWrapper = () => {
wrapper = mountExtended(ToolbarImageButton, {
- propsData: {
+ provide: {
tiptapEditor: editor,
},
});
@@ -29,13 +30,14 @@ describe('content_editor/components/toolbar_image_button', () => {
};
beforeEach(() => {
- const { tiptapExtension: Image } = configureImageExtension({
- renderMarkdown: jest.fn(),
- uploadsPath: '/uploads/',
- });
-
editor = createTestEditor({
- extensions: [Image],
+ extensions: [
+ Image,
+ Attachment.configure({
+ renderMarkdown: jest.fn(),
+ uploadsPath: '/uploads/',
+ }),
+ ],
});
buildWrapper();
@@ -64,13 +66,13 @@ describe('content_editor/components/toolbar_image_button', () => {
});
it('uploads the selected image when file input changes', async () => {
- const commands = mockChainedCommands(editor, ['focus', 'uploadImage', 'run']);
+ const commands = mockChainedCommands(editor, ['focus', 'uploadAttachment', 'run']);
const file = new File(['foo'], 'foo.png', { type: 'image/png' });
await selectFile(file);
expect(commands.focus).toHaveBeenCalled();
- expect(commands.uploadImage).toHaveBeenCalledWith({ file });
+ expect(commands.uploadAttachment).toHaveBeenCalledWith({ file });
expect(commands.run).toHaveBeenCalled();
expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'image', value: 'upload' }]);
diff --git a/spec/frontend/content_editor/components/toolbar_link_button_spec.js b/spec/frontend/content_editor/components/toolbar_link_button_spec.js
index 576a2912f72..0cf488260bd 100644
--- a/spec/frontend/content_editor/components/toolbar_link_button_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_link_button_spec.js
@@ -1,9 +1,9 @@
-import { GlDropdown, GlDropdownDivider, GlButton, GlFormInputGroup } from '@gitlab/ui';
+import { GlDropdown, GlButton, GlFormInputGroup } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import ToolbarLinkButton from '~/content_editor/components/toolbar_link_button.vue';
-import { tiptapExtension as Link } from '~/content_editor/extensions/link';
+import Link from '~/content_editor/extensions/link';
import { hasSelection } from '~/content_editor/services/utils';
-import { createTestEditor, mockChainedCommands } from '../test_utils';
+import { createTestEditor, mockChainedCommands, emitEditorEvent } from '../test_utils';
jest.mock('~/content_editor/services/utils');
@@ -13,21 +13,26 @@ describe('content_editor/components/toolbar_link_button', () => {
const buildWrapper = () => {
wrapper = mountExtended(ToolbarLinkButton, {
- propsData: {
+ provide: {
tiptapEditor: editor,
},
});
};
const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findDropdownDivider = () => wrapper.findComponent(GlDropdownDivider);
const findLinkURLInput = () => wrapper.findComponent(GlFormInputGroup).find('input[type="text"]');
const findApplyLinkButton = () => wrapper.findComponent(GlButton);
const findRemoveLinkButton = () => wrapper.findByText('Remove link');
+ const selectFile = async (file) => {
+ const input = wrapper.find({ ref: 'fileSelector' });
+
+ // override the property definition because `input.files` isn't directly modifiable
+ Object.defineProperty(input.element, 'files', { value: [file], writable: true });
+ await input.trigger('change');
+ };
+
beforeEach(() => {
- editor = createTestEditor({
- extensions: [Link],
- });
+ editor = createTestEditor();
});
afterEach(() => {
@@ -45,14 +50,19 @@ describe('content_editor/components/toolbar_link_button', () => {
beforeEach(async () => {
jest.spyOn(editor, 'isActive').mockReturnValueOnce(true);
buildWrapper();
+
+ await emitEditorEvent({ event: 'transaction', tiptapEditor: editor });
});
it('sets dropdown as active when link extension is active', () => {
expect(findDropdown().props('toggleClass')).toEqual({ active: true });
});
+ it('does not display the upload file option', () => {
+ expect(wrapper.findByText('Upload file').exists()).toBe(false);
+ });
+
it('displays a remove link dropdown option', () => {
- expect(findDropdownDivider().exists()).toBe(true);
expect(wrapper.findByText('Remove link').exists()).toBe(true);
});
@@ -90,7 +100,7 @@ describe('content_editor/components/toolbar_link_button', () => {
href: '/username/my-project/uploads/abcdefgh133535/my-file.zip',
});
- await editor.emit('selectionUpdate', { editor });
+ await emitEditorEvent({ event: 'transaction', tiptapEditor: editor });
expect(findLinkURLInput().element.value).toEqual('uploads/my-file.zip');
});
@@ -100,14 +110,14 @@ describe('content_editor/components/toolbar_link_button', () => {
href: 'https://gitlab.com',
});
- await editor.emit('selectionUpdate', { editor });
+ await emitEditorEvent({ event: 'transaction', tiptapEditor: editor });
expect(findLinkURLInput().element.value).toEqual('https://gitlab.com');
});
});
});
- describe('when there is not an active link', () => {
+ describe('when there is no active link', () => {
beforeEach(() => {
jest.spyOn(editor, 'isActive');
editor.isActive.mockReturnValueOnce(false);
@@ -118,8 +128,11 @@ describe('content_editor/components/toolbar_link_button', () => {
expect(findDropdown().props('toggleClass')).toEqual({ active: false });
});
+ it('displays the upload file option', () => {
+ expect(wrapper.findByText('Upload file').exists()).toBe(true);
+ });
+
it('does not display a remove link dropdown option', () => {
- expect(findDropdownDivider().exists()).toBe(false);
expect(wrapper.findByText('Remove link').exists()).toBe(false);
});
@@ -138,6 +151,19 @@ describe('content_editor/components/toolbar_link_button', () => {
expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'link' }]);
});
+
+ it('uploads the selected image when file input changes', async () => {
+ const commands = mockChainedCommands(editor, ['focus', 'uploadAttachment', 'run']);
+ const file = new File(['foo'], 'foo.png', { type: 'image/png' });
+
+ await selectFile(file);
+
+ expect(commands.focus).toHaveBeenCalled();
+ expect(commands.uploadAttachment).toHaveBeenCalledWith({ file });
+ expect(commands.run).toHaveBeenCalled();
+
+ expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'link' }]);
+ });
});
describe('when the user displays the dropdown', () => {
diff --git a/spec/frontend/content_editor/components/toolbar_table_button_spec.js b/spec/frontend/content_editor/components/toolbar_table_button_spec.js
index 237b2848246..056e5e04e1f 100644
--- a/spec/frontend/content_editor/components/toolbar_table_button_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_table_button_spec.js
@@ -1,10 +1,6 @@
import { GlDropdown, GlButton } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import ToolbarTableButton from '~/content_editor/components/toolbar_table_button.vue';
-import { tiptapExtension as Table } from '~/content_editor/extensions/table';
-import { tiptapExtension as TableCell } from '~/content_editor/extensions/table_cell';
-import { tiptapExtension as TableHeader } from '~/content_editor/extensions/table_header';
-import { tiptapExtension as TableRow } from '~/content_editor/extensions/table_row';
import { createTestEditor, mockChainedCommands } from '../test_utils';
describe('content_editor/components/toolbar_table_button', () => {
@@ -13,7 +9,7 @@ describe('content_editor/components/toolbar_table_button', () => {
const buildWrapper = () => {
wrapper = mountExtended(ToolbarTableButton, {
- propsData: {
+ provide: {
tiptapEditor: editor,
},
});
@@ -23,9 +19,7 @@ describe('content_editor/components/toolbar_table_button', () => {
const getNumButtons = () => findDropdown().findAllComponents(GlButton).length;
beforeEach(() => {
- editor = createTestEditor({
- extensions: [Table, TableCell, TableRow, TableHeader],
- });
+ editor = createTestEditor();
buildWrapper();
});
@@ -35,17 +29,17 @@ describe('content_editor/components/toolbar_table_button', () => {
wrapper.destroy();
});
- it('renders a grid of 3x3 buttons to create a table', () => {
- expect(getNumButtons()).toBe(9); // 3 x 3
+ it('renders a grid of 5x5 buttons to create a table', () => {
+ expect(getNumButtons()).toBe(25); // 5x5
});
describe.each`
row | col | numButtons | tableSize
- ${1} | ${2} | ${9} | ${'1x2'}
- ${2} | ${2} | ${9} | ${'2x2'}
- ${2} | ${3} | ${12} | ${'2x3'}
- ${3} | ${2} | ${12} | ${'3x2'}
- ${3} | ${3} | ${16} | ${'3x3'}
+ ${3} | ${4} | ${25} | ${'3x4'}
+ ${4} | ${4} | ${25} | ${'4x4'}
+ ${4} | ${5} | ${30} | ${'4x5'}
+ ${5} | ${4} | ${30} | ${'5x4'}
+ ${5} | ${5} | ${36} | ${'5x5'}
`('button($row, $col) in the table creator grid', ({ row, col, numButtons, tableSize }) => {
describe('on mouse over', () => {
beforeEach(async () => {
@@ -56,9 +50,7 @@ describe('content_editor/components/toolbar_table_button', () => {
it('marks all rows and cols before it as active', () => {
const prevRow = Math.max(1, row - 1);
const prevCol = Math.max(1, col - 1);
- expect(wrapper.findByTestId(`table-${prevRow}-${prevCol}`).element).toHaveClass(
- 'gl-bg-blue-50!',
- );
+ expect(wrapper.findByTestId(`table-${prevRow}-${prevCol}`).element).toHaveClass('active');
});
it('shows a help text indicating the size of the table being inserted', () => {
@@ -95,8 +87,8 @@ describe('content_editor/components/toolbar_table_button', () => {
});
});
- it('does not create more buttons than a 8x8 grid', async () => {
- for (let i = 3; i < 8; i += 1) {
+ it('does not create more buttons than a 10x10 grid', async () => {
+ for (let i = 5; i < 10; i += 1) {
expect(getNumButtons()).toBe(i * i);
// eslint-disable-next-line no-await-in-loop
@@ -104,6 +96,6 @@ describe('content_editor/components/toolbar_table_button', () => {
expect(findDropdown().element).toHaveText(`Insert a ${i}x${i} table.`);
}
- expect(getNumButtons()).toBe(64); // 8x8 (and not 9x9)
+ expect(getNumButtons()).toBe(100); // 10x10 (and not 11x11)
});
});
diff --git a/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js b/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
index 9a46e27404f..65c1c8c8310 100644
--- a/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
@@ -1,11 +1,12 @@
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import EditorStateObserver from '~/content_editor/components/editor_state_observer.vue';
import ToolbarTextStyleDropdown from '~/content_editor/components/toolbar_text_style_dropdown.vue';
import { TEXT_STYLE_DROPDOWN_ITEMS } from '~/content_editor/constants';
-import { tiptapExtension as Heading } from '~/content_editor/extensions/heading';
-import { createTestEditor, mockChainedCommands } from '../test_utils';
+import Heading from '~/content_editor/extensions/heading';
+import { createTestEditor, mockChainedCommands, emitEditorEvent } from '../test_utils';
-describe('content_editor/components/toolbar_headings_dropdown', () => {
+describe('content_editor/components/toolbar_text_style_dropdown', () => {
let wrapper;
let tiptapEditor;
@@ -22,9 +23,12 @@ describe('content_editor/components/toolbar_headings_dropdown', () => {
stubs: {
GlDropdown,
GlDropdownItem,
+ EditorStateObserver,
},
- propsData: {
+ provide: {
tiptapEditor,
+ },
+ propsData: {
...propsData,
},
});
@@ -50,7 +54,7 @@ describe('content_editor/components/toolbar_headings_dropdown', () => {
describe('when there is an active item ', () => {
let activeTextStyle;
- beforeEach(() => {
+ beforeEach(async () => {
[, activeTextStyle] = TEXT_STYLE_DROPDOWN_ITEMS;
tiptapEditor.isActive.mockImplementation(
@@ -59,6 +63,7 @@ describe('content_editor/components/toolbar_headings_dropdown', () => {
);
buildWrapper();
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
});
it('displays the active text style label as the dropdown toggle text ', () => {
@@ -79,9 +84,10 @@ describe('content_editor/components/toolbar_headings_dropdown', () => {
});
describe('when there isn’t an active item', () => {
- beforeEach(() => {
+ beforeEach(async () => {
tiptapEditor.isActive.mockReturnValue(false);
buildWrapper();
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
});
it('sets dropdown as disabled', () => {
diff --git a/spec/frontend/content_editor/components/top_toolbar_spec.js b/spec/frontend/content_editor/components/top_toolbar_spec.js
index 5411793cd5e..a5df3d73289 100644
--- a/spec/frontend/content_editor/components/top_toolbar_spec.js
+++ b/spec/frontend/content_editor/components/top_toolbar_spec.js
@@ -1,39 +1,23 @@
-import { shallowMount } from '@vue/test-utils';
import { mockTracking } from 'helpers/tracking_helper';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import TopToolbar from '~/content_editor/components/top_toolbar.vue';
import {
TOOLBAR_CONTROL_TRACKING_ACTION,
CONTENT_EDITOR_TRACKING_LABEL,
} from '~/content_editor/constants';
-import { createContentEditor } from '~/content_editor/services/create_content_editor';
describe('content_editor/components/top_toolbar', () => {
let wrapper;
- let contentEditor;
let trackingSpy;
- const buildEditor = () => {
- contentEditor = createContentEditor({ renderMarkdown: () => true });
- };
const buildWrapper = () => {
- wrapper = extendedWrapper(
- shallowMount(TopToolbar, {
- propsData: {
- contentEditor,
- },
- }),
- );
+ wrapper = shallowMountExtended(TopToolbar);
};
beforeEach(() => {
trackingSpy = mockTracking(undefined, null, jest.spyOn);
});
- beforeEach(() => {
- buildEditor();
- });
-
afterEach(() => {
wrapper.destroy();
});
@@ -58,18 +42,17 @@ describe('content_editor/components/top_toolbar', () => {
});
it('renders the toolbar control with the provided properties', () => {
- expect(wrapper.findByTestId(testId).props()).toEqual({
- ...controlProps,
- tiptapEditor: contentEditor.tiptapEditor,
+ expect(wrapper.findByTestId(testId).exists()).toBe(true);
+
+ Object.keys(controlProps).forEach((propName) => {
+ expect(wrapper.findByTestId(testId).props(propName)).toBe(controlProps[propName]);
});
});
- it.each`
- eventData
- ${{ contentType: 'bold' }}
- ${{ contentType: 'blockquote', value: 1 }}
- `('tracks the execution of toolbar controls', ({ eventData }) => {
+ it('tracks the execution of toolbar controls', () => {
+ const eventData = { contentType: 'blockquote', value: 1 };
const { contentType, value } = eventData;
+
wrapper.findByTestId(testId).vm.$emit('execute', eventData);
expect(trackingSpy).toHaveBeenCalledWith(undefined, TOOLBAR_CONTROL_TRACKING_ACTION, {
diff --git a/spec/frontend/content_editor/extensions/attachment_spec.js b/spec/frontend/content_editor/extensions/attachment_spec.js
new file mode 100644
index 00000000000..1334b1ddaad
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/attachment_spec.js
@@ -0,0 +1,235 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import { once } from 'lodash';
+import waitForPromises from 'helpers/wait_for_promises';
+import Attachment from '~/content_editor/extensions/attachment';
+import Image from '~/content_editor/extensions/image';
+import Link from '~/content_editor/extensions/link';
+import Loading from '~/content_editor/extensions/loading';
+import httpStatus from '~/lib/utils/http_status';
+import { loadMarkdownApiResult } from '../markdown_processing_examples';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/extensions/attachment', () => {
+ let tiptapEditor;
+ let eq;
+ let doc;
+ let p;
+ let image;
+ let loading;
+ let link;
+ let renderMarkdown;
+ let mock;
+
+ const uploadsPath = '/uploads/';
+ const imageFile = new File(['foo'], 'test-file.png', { type: 'image/png' });
+ const attachmentFile = new File(['foo'], 'test-file.zip', { type: 'application/zip' });
+
+ beforeEach(() => {
+ renderMarkdown = jest.fn();
+
+ tiptapEditor = createTestEditor({
+ extensions: [Loading, Link, Image, Attachment.configure({ renderMarkdown, uploadsPath })],
+ });
+
+ ({
+ builders: { doc, p, image, loading, link },
+ eq,
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ loading: { markType: Loading.name },
+ image: { nodeType: Image.name },
+ link: { nodeType: Link.name },
+ },
+ }));
+
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.reset();
+ });
+
+ it.each`
+ eventType | propName | eventData | output
+ ${'paste'} | ${'handlePaste'} | ${{ clipboardData: { files: [attachmentFile] } }} | ${true}
+ ${'paste'} | ${'handlePaste'} | ${{ clipboardData: { files: [] } }} | ${undefined}
+ ${'drop'} | ${'handleDrop'} | ${{ dataTransfer: { files: [attachmentFile] } }} | ${true}
+ `('handles $eventType properly', ({ eventType, propName, eventData, output }) => {
+ const event = Object.assign(new Event(eventType), eventData);
+ const handled = tiptapEditor.view.someProp(propName, (eventHandler) => {
+ return eventHandler(tiptapEditor.view, event);
+ });
+
+ expect(handled).toBe(output);
+ });
+
+ describe('uploadAttachment command', () => {
+ let initialDoc;
+ beforeEach(() => {
+ initialDoc = doc(p(''));
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ });
+
+ describe('when the file has image mime type', () => {
+ const base64EncodedFile = 'data:image/png;base64,Zm9v';
+
+ beforeEach(() => {
+ renderMarkdown.mockResolvedValue(
+ loadMarkdownApiResult('project_wiki_attachment_image').body,
+ );
+ });
+
+ describe('when uploading succeeds', () => {
+ const successResponse = {
+ link: {
+ markdown: '![test-file](test-file.png)',
+ },
+ };
+
+ beforeEach(() => {
+ mock.onPost().reply(httpStatus.OK, successResponse);
+ });
+
+ it('inserts an image with src set to the encoded image file and uploading true', (done) => {
+ const expectedDoc = doc(p(image({ uploading: true, src: base64EncodedFile })));
+
+ tiptapEditor.on(
+ 'update',
+ once(() => {
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ done();
+ }),
+ );
+
+ tiptapEditor.commands.uploadAttachment({ file: imageFile });
+ });
+
+ it('updates the inserted image with canonicalSrc when upload is successful', async () => {
+ const expectedDoc = doc(
+ p(
+ image({
+ canonicalSrc: 'test-file.png',
+ src: base64EncodedFile,
+ alt: 'test-file',
+ uploading: false,
+ }),
+ ),
+ );
+
+ tiptapEditor.commands.uploadAttachment({ file: imageFile });
+
+ await waitForPromises();
+
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ });
+ });
+
+ describe('when uploading request fails', () => {
+ beforeEach(() => {
+ mock.onPost().reply(httpStatus.INTERNAL_SERVER_ERROR);
+ });
+
+ it('resets the doc to original state', async () => {
+ const expectedDoc = doc(p(''));
+
+ tiptapEditor.commands.uploadAttachment({ file: imageFile });
+
+ await waitForPromises();
+
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ });
+
+ it('emits an error event that includes an error message', (done) => {
+ tiptapEditor.commands.uploadAttachment({ file: imageFile });
+
+ tiptapEditor.on('error', ({ error }) => {
+ expect(error).toBe('An error occurred while uploading the image. Please try again.');
+ done();
+ });
+ });
+ });
+ });
+
+ describe('when the file has a zip (or any other attachment) mime type', () => {
+ const markdownApiResult = loadMarkdownApiResult('project_wiki_attachment_link').body;
+
+ beforeEach(() => {
+ renderMarkdown.mockResolvedValue(markdownApiResult);
+ });
+
+ describe('when uploading succeeds', () => {
+ const successResponse = {
+ link: {
+ markdown: '[test-file](test-file.zip)',
+ },
+ };
+
+ beforeEach(() => {
+ mock.onPost().reply(httpStatus.OK, successResponse);
+ });
+
+ it('inserts a loading mark', (done) => {
+ const expectedDoc = doc(p(loading({ label: 'test-file' })));
+
+ tiptapEditor.on(
+ 'update',
+ once(() => {
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ done();
+ }),
+ );
+
+ tiptapEditor.commands.uploadAttachment({ file: attachmentFile });
+ });
+
+ it('updates the loading mark with a link with canonicalSrc and href attrs', async () => {
+ const [, group, project] = markdownApiResult.match(/\/(group[0-9]+)\/(project[0-9]+)\//);
+ const expectedDoc = doc(
+ p(
+ link(
+ {
+ canonicalSrc: 'test-file.zip',
+ href: `/${group}/${project}/-/wikis/test-file.zip`,
+ },
+ 'test-file',
+ ),
+ ),
+ );
+
+ tiptapEditor.commands.uploadAttachment({ file: attachmentFile });
+
+ await waitForPromises();
+
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ });
+ });
+
+ describe('when uploading request fails', () => {
+ beforeEach(() => {
+ mock.onPost().reply(httpStatus.INTERNAL_SERVER_ERROR);
+ });
+
+ it('resets the doc to original state', async () => {
+ const expectedDoc = doc(p(''));
+
+ tiptapEditor.commands.uploadAttachment({ file: attachmentFile });
+
+ await waitForPromises();
+
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ });
+
+ it('emits an error event that includes an error message', (done) => {
+ tiptapEditor.commands.uploadAttachment({ file: attachmentFile });
+
+ tiptapEditor.on('error', ({ error }) => {
+ expect(error).toBe('An error occurred while uploading the file. Please try again.');
+ done();
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/code_block_highlight_spec.js b/spec/frontend/content_editor/extensions/code_block_highlight_spec.js
index cc695ffe241..188e6580dc6 100644
--- a/spec/frontend/content_editor/extensions/code_block_highlight_spec.js
+++ b/spec/frontend/content_editor/extensions/code_block_highlight_spec.js
@@ -1,4 +1,4 @@
-import { tiptapExtension as CodeBlockHighlight } from '~/content_editor/extensions/code_block_highlight';
+import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight';
import { loadMarkdownApiResult } from '../markdown_processing_examples';
import { createTestEditor } from '../test_utils';
@@ -25,7 +25,6 @@ describe('content_editor/extensions/code_block_highlight', () => {
expect(tiptapEditor.getJSON().content[0].attrs).toMatchObject({
language,
- params: language,
});
});
diff --git a/spec/frontend/content_editor/extensions/emoji_spec.js b/spec/frontend/content_editor/extensions/emoji_spec.js
new file mode 100644
index 00000000000..c1b8dc9bdbb
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/emoji_spec.js
@@ -0,0 +1,57 @@
+import { initEmojiMock } from 'helpers/emoji';
+import Emoji from '~/content_editor/extensions/emoji';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/extensions/emoji', () => {
+ let tiptapEditor;
+ let doc;
+ let p;
+ let emoji;
+ let eq;
+
+ beforeEach(async () => {
+ await initEmojiMock();
+ });
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [Emoji] });
+ ({
+ builders: { doc, p, emoji },
+ eq,
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ loading: { nodeType: Emoji.name },
+ },
+ }));
+ });
+
+ describe('when typing a valid emoji input rule', () => {
+ it('inserts an emoji node', () => {
+ const { view } = tiptapEditor;
+ const { selection } = view.state;
+ const expectedDoc = doc(
+ p(
+ ' ',
+ emoji({ moji: '❤', name: 'heart', title: 'heavy black heart', unicodeVersion: '1.1' }),
+ ),
+ );
+ // Triggers the event handler that input rules listen to
+ view.someProp('handleTextInput', (f) => f(view, selection.from, selection.to, ':heart:'));
+
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ });
+ });
+
+ describe('when typing an invalid emoji input rule', () => {
+ it('does not insert an emoji node', () => {
+ const { view } = tiptapEditor;
+ const { selection } = view.state;
+ const invalidEmoji = ':invalid:';
+ const expectedDoc = doc(p());
+ // Triggers the event handler that input rules listen to
+ view.someProp('handleTextInput', (f) => f(view, selection.from, selection.to, invalidEmoji));
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/hard_break_spec.js b/spec/frontend/content_editor/extensions/hard_break_spec.js
index ebd58e60b0c..9e2e28b6e72 100644
--- a/spec/frontend/content_editor/extensions/hard_break_spec.js
+++ b/spec/frontend/content_editor/extensions/hard_break_spec.js
@@ -1,4 +1,4 @@
-import { tiptapExtension as HardBreak } from '~/content_editor/extensions/hard_break';
+import HardBreak from '~/content_editor/extensions/hard_break';
import { createTestEditor, createDocBuilder } from '../test_utils';
describe('content_editor/extensions/hard_break', () => {
diff --git a/spec/frontend/content_editor/extensions/image_spec.js b/spec/frontend/content_editor/extensions/image_spec.js
deleted file mode 100644
index 922966b813a..00000000000
--- a/spec/frontend/content_editor/extensions/image_spec.js
+++ /dev/null
@@ -1,193 +0,0 @@
-import axios from 'axios';
-import MockAdapter from 'axios-mock-adapter';
-import { once } from 'lodash';
-import waitForPromises from 'helpers/wait_for_promises';
-import * as Image from '~/content_editor/extensions/image';
-import httpStatus from '~/lib/utils/http_status';
-import { loadMarkdownApiResult } from '../markdown_processing_examples';
-import { createTestEditor, createDocBuilder } from '../test_utils';
-
-describe('content_editor/extensions/image', () => {
- let tiptapEditor;
- let eq;
- let doc;
- let p;
- let image;
- let renderMarkdown;
- let mock;
- const uploadsPath = '/uploads/';
- const validFile = new File(['foo'], 'foo.png', { type: 'image/png' });
- const invalidFile = new File(['foo'], 'bar.html', { type: 'text/html' });
-
- beforeEach(() => {
- renderMarkdown = jest
- .fn()
- .mockResolvedValue(loadMarkdownApiResult('project_wiki_attachment_image').body);
-
- const { tiptapExtension } = Image.configure({ renderMarkdown, uploadsPath });
-
- tiptapEditor = createTestEditor({ extensions: [tiptapExtension] });
-
- ({
- builders: { doc, p, image },
- eq,
- } = createDocBuilder({
- tiptapEditor,
- names: { image: { nodeType: tiptapExtension.name } },
- }));
-
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mock.reset();
- });
-
- it.each`
- file | valid | description
- ${validFile} | ${true} | ${'handles paste event when mime type is valid'}
- ${invalidFile} | ${false} | ${'does not handle paste event when mime type is invalid'}
- `('$description', ({ file, valid }) => {
- const pasteEvent = Object.assign(new Event('paste'), {
- clipboardData: {
- files: [file],
- },
- });
- let handled;
-
- tiptapEditor.view.someProp('handlePaste', (eventHandler) => {
- handled = eventHandler(tiptapEditor.view, pasteEvent);
- });
-
- expect(handled).toBe(valid);
- });
-
- it.each`
- file | valid | description
- ${validFile} | ${true} | ${'handles drop event when mime type is valid'}
- ${invalidFile} | ${false} | ${'does not handle drop event when mime type is invalid'}
- `('$description', ({ file, valid }) => {
- const dropEvent = Object.assign(new Event('drop'), {
- dataTransfer: {
- files: [file],
- },
- });
- let handled;
-
- tiptapEditor.view.someProp('handleDrop', (eventHandler) => {
- handled = eventHandler(tiptapEditor.view, dropEvent);
- });
-
- expect(handled).toBe(valid);
- });
-
- it('handles paste event when mime type is correct', () => {
- const pasteEvent = Object.assign(new Event('paste'), {
- clipboardData: {
- files: [new File(['foo'], 'foo.png', { type: 'image/png' })],
- },
- });
- const handled = tiptapEditor.view.someProp('handlePaste', (eventHandler) => {
- return eventHandler(tiptapEditor.view, pasteEvent);
- });
-
- expect(handled).toBe(true);
- });
-
- describe('uploadImage command', () => {
- describe('when file has correct mime type', () => {
- let initialDoc;
- const base64EncodedFile = 'data:image/png;base64,Zm9v';
-
- beforeEach(() => {
- initialDoc = doc(p(''));
- tiptapEditor.commands.setContent(initialDoc.toJSON());
- });
-
- describe('when uploading image succeeds', () => {
- const successResponse = {
- link: {
- markdown: '[image](/uploads/25265/image.png)',
- },
- };
-
- beforeEach(() => {
- mock.onPost().reply(httpStatus.OK, successResponse);
- });
-
- it('inserts an image with src set to the encoded image file and uploading true', (done) => {
- const expectedDoc = doc(p(image({ uploading: true, src: base64EncodedFile })));
-
- tiptapEditor.on(
- 'update',
- once(() => {
- expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
- done();
- }),
- );
-
- tiptapEditor.commands.uploadImage({ file: validFile });
- });
-
- it('updates the inserted image with canonicalSrc when upload is successful', async () => {
- const expectedDoc = doc(
- p(
- image({
- canonicalSrc: 'test-file.png',
- src: base64EncodedFile,
- alt: 'test file',
- uploading: false,
- }),
- ),
- );
-
- tiptapEditor.commands.uploadImage({ file: validFile });
-
- await waitForPromises();
-
- expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
- });
- });
-
- describe('when uploading image request fails', () => {
- beforeEach(() => {
- mock.onPost().reply(httpStatus.INTERNAL_SERVER_ERROR);
- });
-
- it('resets the doc to orginal state', async () => {
- const expectedDoc = doc(p(''));
-
- tiptapEditor.commands.uploadImage({ file: validFile });
-
- await waitForPromises();
-
- expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
- });
-
- it('emits an error event that includes an error message', (done) => {
- tiptapEditor.commands.uploadImage({ file: validFile });
-
- tiptapEditor.on('error', (message) => {
- expect(message).toBe('An error occurred while uploading the image. Please try again.');
- done();
- });
- });
- });
- });
-
- describe('when file does not have correct mime type', () => {
- let initialDoc;
-
- beforeEach(() => {
- initialDoc = doc(p(''));
- tiptapEditor.commands.setContent(initialDoc.toJSON());
- });
-
- it('does not start the upload image process', () => {
- tiptapEditor.commands.uploadImage({ file: invalidFile });
-
- expect(eq(tiptapEditor.state.doc, initialDoc)).toBe(true);
- });
- });
- });
-});
diff --git a/spec/frontend/content_editor/extensions/inline_diff_spec.js b/spec/frontend/content_editor/extensions/inline_diff_spec.js
new file mode 100644
index 00000000000..63cdf665e7f
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/inline_diff_spec.js
@@ -0,0 +1,27 @@
+import { inputRegexAddition, inputRegexDeletion } from '~/content_editor/extensions/inline_diff';
+
+describe('content_editor/extensions/inline_diff', () => {
+ describe.each`
+ inputRegex | description | input | matches
+ ${inputRegexAddition} | ${'inputRegexAddition'} | ${'hello{+world+}'} | ${true}
+ ${inputRegexAddition} | ${'inputRegexAddition'} | ${'hello{+ world +}'} | ${true}
+ ${inputRegexAddition} | ${'inputRegexAddition'} | ${'hello {+ world+}'} | ${true}
+ ${inputRegexAddition} | ${'inputRegexAddition'} | ${'{+hello world +}'} | ${true}
+ ${inputRegexAddition} | ${'inputRegexAddition'} | ${'{+hello with \nnewline+}'} | ${false}
+ ${inputRegexAddition} | ${'inputRegexAddition'} | ${'{+open only'} | ${false}
+ ${inputRegexAddition} | ${'inputRegexAddition'} | ${'close only+}'} | ${false}
+ ${inputRegexDeletion} | ${'inputRegexDeletion'} | ${'hello{-world-}'} | ${true}
+ ${inputRegexDeletion} | ${'inputRegexDeletion'} | ${'hello{- world -}'} | ${true}
+ ${inputRegexDeletion} | ${'inputRegexDeletion'} | ${'hello {- world-}'} | ${true}
+ ${inputRegexDeletion} | ${'inputRegexDeletion'} | ${'{-hello world -}'} | ${true}
+ ${inputRegexDeletion} | ${'inputRegexDeletion'} | ${'{+hello with \nnewline+}'} | ${false}
+ ${inputRegexDeletion} | ${'inputRegexDeletion'} | ${'{-open only'} | ${false}
+ ${inputRegexDeletion} | ${'inputRegexDeletion'} | ${'close only-}'} | ${false}
+ `('$description', ({ inputRegex, input, matches }) => {
+ it(`${matches ? 'matches' : 'does not match'}: "${input}"`, () => {
+ const match = new RegExp(inputRegex).test(input);
+
+ expect(match).toBe(matches);
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/markdown_processing_spec.js b/spec/frontend/content_editor/markdown_processing_spec.js
index 028cd6a8271..da3f6e64db8 100644
--- a/spec/frontend/content_editor/markdown_processing_spec.js
+++ b/spec/frontend/content_editor/markdown_processing_spec.js
@@ -1,6 +1,8 @@
import { createContentEditor } from '~/content_editor';
import { loadMarkdownApiExamples, loadMarkdownApiResult } from './markdown_processing_examples';
+jest.mock('~/emoji');
+
describe('markdown processing', () => {
// Ensure we generate same markdown that was provided to Markdown API.
it.each(loadMarkdownApiExamples())(
diff --git a/spec/frontend/content_editor/services/build_serializer_config_spec.js b/spec/frontend/content_editor/services/build_serializer_config_spec.js
deleted file mode 100644
index 532e0493830..00000000000
--- a/spec/frontend/content_editor/services/build_serializer_config_spec.js
+++ /dev/null
@@ -1,38 +0,0 @@
-import * as Blockquote from '~/content_editor/extensions/blockquote';
-import * as Bold from '~/content_editor/extensions/bold';
-import * as Dropcursor from '~/content_editor/extensions/dropcursor';
-import * as Paragraph from '~/content_editor/extensions/paragraph';
-
-import buildSerializerConfig from '~/content_editor/services/build_serializer_config';
-
-describe('content_editor/services/build_serializer_config', () => {
- describe('given one or more content editor extensions', () => {
- it('creates a serializer config that collects all extension serializers by type', () => {
- const extensions = [Bold, Blockquote, Paragraph];
- const serializerConfig = buildSerializerConfig(extensions);
-
- extensions.forEach(({ tiptapExtension, serializer }) => {
- const { name, type } = tiptapExtension;
- expect(serializerConfig[`${type}s`][name]).toBe(serializer);
- });
- });
- });
-
- describe('given an extension without serializer', () => {
- it('does not include the extension in the serializer config', () => {
- const serializerConfig = buildSerializerConfig([Dropcursor]);
-
- expect(serializerConfig.marks[Dropcursor.tiptapExtension.name]).toBe(undefined);
- expect(serializerConfig.nodes[Dropcursor.tiptapExtension.name]).toBe(undefined);
- });
- });
-
- describe('given no extensions', () => {
- it('creates an empty serializer config', () => {
- expect(buildSerializerConfig()).toStrictEqual({
- marks: {},
- nodes: {},
- });
- });
- });
-});
diff --git a/spec/frontend/content_editor/services/content_editor_spec.js b/spec/frontend/content_editor/services/content_editor_spec.js
new file mode 100644
index 00000000000..e48687f1548
--- /dev/null
+++ b/spec/frontend/content_editor/services/content_editor_spec.js
@@ -0,0 +1,68 @@
+import {
+ LOADING_CONTENT_EVENT,
+ LOADING_SUCCESS_EVENT,
+ LOADING_ERROR_EVENT,
+} from '~/content_editor/constants';
+import { ContentEditor } from '~/content_editor/services/content_editor';
+
+import { createTestEditor } from '../test_utils';
+
+describe('content_editor/services/content_editor', () => {
+ let contentEditor;
+ let serializer;
+
+ beforeEach(() => {
+ const tiptapEditor = createTestEditor();
+ jest.spyOn(tiptapEditor, 'destroy');
+
+ serializer = { deserialize: jest.fn() };
+ contentEditor = new ContentEditor({ tiptapEditor, serializer });
+ });
+
+ describe('.dispose', () => {
+ it('destroys the tiptapEditor', () => {
+ expect(contentEditor.tiptapEditor.destroy).not.toHaveBeenCalled();
+
+ contentEditor.dispose();
+
+ expect(contentEditor.tiptapEditor.destroy).toHaveBeenCalled();
+ });
+ });
+
+ describe('when setSerializedContent succeeds', () => {
+ beforeEach(() => {
+ serializer.deserialize.mockResolvedValueOnce('');
+ });
+
+ it('emits loadingContent and loadingSuccess event', () => {
+ let loadingContentEmitted = false;
+
+ contentEditor.on(LOADING_CONTENT_EVENT, () => {
+ loadingContentEmitted = true;
+ });
+ contentEditor.on(LOADING_SUCCESS_EVENT, () => {
+ expect(loadingContentEmitted).toBe(true);
+ });
+
+ contentEditor.setSerializedContent('**bold text**');
+ });
+ });
+
+ describe('when setSerializedContent fails', () => {
+ const error = 'error';
+
+ beforeEach(() => {
+ serializer.deserialize.mockRejectedValueOnce(error);
+ });
+
+ it('emits loadingError event', async () => {
+ contentEditor.on(LOADING_ERROR_EVENT, (e) => {
+ expect(e).toBe('error');
+ });
+
+ await expect(() => contentEditor.setSerializedContent('**bold text**')).rejects.toEqual(
+ error,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/services/create_content_editor_spec.js b/spec/frontend/content_editor/services/create_content_editor_spec.js
index b614efd954a..6b2f28b3306 100644
--- a/spec/frontend/content_editor/services/create_content_editor_spec.js
+++ b/spec/frontend/content_editor/services/create_content_editor_spec.js
@@ -2,7 +2,9 @@ import { PROVIDE_SERIALIZER_OR_RENDERER_ERROR } from '~/content_editor/constants
import { createContentEditor } from '~/content_editor/services/create_content_editor';
import { createTestContentEditorExtension } from '../test_utils';
-describe('content_editor/services/create_editor', () => {
+jest.mock('~/emoji');
+
+describe('content_editor/services/create_content_editor', () => {
let renderMarkdown;
let editor;
const uploadsPath = '/uploads';
@@ -32,13 +34,15 @@ describe('content_editor/services/create_editor', () => {
it('allows providing external content editor extensions', async () => {
const labelReference = 'this is a ~group::editor';
+ const { tiptapExtension, serializer } = createTestContentEditorExtension();
renderMarkdown.mockReturnValueOnce(
'<p>this is a <span data-reference="label" data-label-name="group::editor">group::editor</span></p>',
);
editor = createContentEditor({
renderMarkdown,
- extensions: [createTestContentEditorExtension()],
+ extensions: [tiptapExtension],
+ serializerConfig: { nodes: { [tiptapExtension.name]: serializer } },
});
await editor.setSerializedContent(labelReference);
@@ -50,9 +54,9 @@ describe('content_editor/services/create_editor', () => {
expect(() => createContentEditor()).toThrow(PROVIDE_SERIALIZER_OR_RENDERER_ERROR);
});
- it('provides uploadsPath and renderMarkdown function to Image extension', () => {
+ it('provides uploadsPath and renderMarkdown function to Attachment extension', () => {
expect(
- editor.tiptapEditor.extensionManager.extensions.find((e) => e.name === 'image').options,
+ editor.tiptapEditor.extensionManager.extensions.find((e) => e.name === 'attachment').options,
).toMatchObject({
uploadsPath,
renderMarkdown,
diff --git a/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js b/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
index 64f3d8df6e0..afe09a75f16 100644
--- a/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
+++ b/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
@@ -4,10 +4,10 @@ import {
INPUT_RULE_TRACKING_ACTION,
CONTENT_EDITOR_TRACKING_LABEL,
} from '~/content_editor/constants';
-import { tiptapExtension as BulletList } from '~/content_editor/extensions/bullet_list';
-import { tiptapExtension as CodeBlockLowlight } from '~/content_editor/extensions/code_block_highlight';
-import { tiptapExtension as Heading } from '~/content_editor/extensions/heading';
-import { tiptapExtension as ListItem } from '~/content_editor/extensions/list_item';
+import BulletList from '~/content_editor/extensions/bullet_list';
+import CodeBlockLowlight from '~/content_editor/extensions/code_block_highlight';
+import Heading from '~/content_editor/extensions/heading';
+import ListItem from '~/content_editor/extensions/list_item';
import trackInputRulesAndShortcuts from '~/content_editor/services/track_input_rules_and_shortcuts';
import { ENTER_KEY, BACKSPACE_KEY } from '~/lib/utils/keys';
import { createTestEditor } from '../test_utils';
diff --git a/spec/frontend/content_editor/services/upload_file_spec.js b/spec/frontend/content_editor/services/upload_helpers_spec.js
index 87c5298079e..ee9333232db 100644
--- a/spec/frontend/content_editor/services/upload_file_spec.js
+++ b/spec/frontend/content_editor/services/upload_helpers_spec.js
@@ -1,9 +1,9 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
-import { uploadFile } from '~/content_editor/services/upload_file';
+import { uploadFile } from '~/content_editor/services/upload_helpers';
import httpStatus from '~/lib/utils/http_status';
-describe('content_editor/services/upload_file', () => {
+describe('content_editor/services/upload_helpers', () => {
const uploadsPath = '/uploads';
const file = new File(['content'], 'file.txt');
// TODO: Replace with automated fixture
diff --git a/spec/frontend/content_editor/test_utils.js b/spec/frontend/content_editor/test_utils.js
index 090e1d92218..b5a2abc2389 100644
--- a/spec/frontend/content_editor/test_utils.js
+++ b/spec/frontend/content_editor/test_utils.js
@@ -4,6 +4,7 @@ import { Paragraph } from '@tiptap/extension-paragraph';
import { Text } from '@tiptap/extension-text';
import { Editor } from '@tiptap/vue-2';
import { builders, eq } from 'prosemirror-test-builder';
+import { nextTick } from 'vue';
export const createDocBuilder = ({ tiptapEditor, names = {} }) => {
const docBuilders = builders(tiptapEditor.schema, {
@@ -14,6 +15,12 @@ export const createDocBuilder = ({ tiptapEditor, names = {} }) => {
return { eq, builders: docBuilders };
};
+export const emitEditorEvent = ({ tiptapEditor, event, params = {} }) => {
+ tiptapEditor.emit(event, { editor: tiptapEditor, ...params });
+
+ return nextTick();
+};
+
/**
* Creates an instance of the Tiptap Editor class
* with a minimal configuration for testing purposes.
diff --git a/spec/frontend/cycle_analytics/__snapshots__/base_spec.js.snap b/spec/frontend/cycle_analytics/__snapshots__/base_spec.js.snap
deleted file mode 100644
index 1af612ed029..00000000000
--- a/spec/frontend/cycle_analytics/__snapshots__/base_spec.js.snap
+++ /dev/null
@@ -1,9 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Value stream analytics component isEmptyStage = true renders the empty stage with \`Not enough data\` message 1`] = `"<gl-empty-state-stub title=\\"We don't have enough data to show this stage.\\" svgpath=\\"path/to/no/data\\" description=\\"The issue stage shows the time it takes from creating an issue to assigning the issue to a milestone, or add the issue to a list on your Issue Board. Begin creating issues to see data for this stage.\\" class=\\"js-empty-state\\"></gl-empty-state-stub>"`;
-
-exports[`Value stream analytics component isEmptyStage = true with a selectedStageError renders the empty stage with \`There is too much data to calculate\` message 1`] = `"<gl-empty-state-stub title=\\"There is too much data to calculate\\" svgpath=\\"path/to/no/data\\" description=\\"\\" class=\\"js-empty-state\\"></gl-empty-state-stub>"`;
-
-exports[`Value stream analytics component isLoading = true renders the path navigation component with prop \`loading\` set to true 1`] = `"<path-navigation-stub loading=\\"true\\" stages=\\"\\" selectedstage=\\"[object Object]\\" class=\\"js-path-navigation gl-w-full gl-pb-2\\"></path-navigation-stub>"`;
-
-exports[`Value stream analytics component without enough permissions renders the empty stage with \`You need permission\` message 1`] = `"<gl-empty-state-stub title=\\"You need permission.\\" svgpath=\\"path/to/no/access\\" description=\\"Want to see the data? Please ask an administrator for access.\\" class=\\"js-empty-state\\"></gl-empty-state-stub>"`;
diff --git a/spec/frontend/cycle_analytics/__snapshots__/total_time_component_spec.js.snap b/spec/frontend/cycle_analytics/__snapshots__/total_time_component_spec.js.snap
new file mode 100644
index 00000000000..e688df8f281
--- /dev/null
+++ b/spec/frontend/cycle_analytics/__snapshots__/total_time_component_spec.js.snap
@@ -0,0 +1,28 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`TotalTimeComponent with a blank object should render -- 1`] = `"<span class=\\"total-time\\"> -- </span>"`;
+
+exports[`TotalTimeComponent with a valid time object with {"days": 3, "mins": 47, "seconds": 3} 1`] = `
+"<span class=\\"total-time\\">
+ 3 <span>days</span></span>"
+`;
+
+exports[`TotalTimeComponent with a valid time object with {"hours": 7, "mins": 20, "seconds": 10} 1`] = `
+"<span class=\\"total-time\\">
+ 7 <span>hrs</span></span>"
+`;
+
+exports[`TotalTimeComponent with a valid time object with {"hours": 23, "mins": 10} 1`] = `
+"<span class=\\"total-time\\">
+ 23 <span>hrs</span></span>"
+`;
+
+exports[`TotalTimeComponent with a valid time object with {"mins": 47, "seconds": 3} 1`] = `
+"<span class=\\"total-time\\">
+ 47 <span>mins</span></span>"
+`;
+
+exports[`TotalTimeComponent with a valid time object with {"seconds": 35} 1`] = `
+"<span class=\\"total-time\\">
+ 35 <span>s</span></span>"
+`;
diff --git a/spec/frontend/cycle_analytics/base_spec.js b/spec/frontend/cycle_analytics/base_spec.js
index 2f85cc04051..71830eed3ef 100644
--- a/spec/frontend/cycle_analytics/base_spec.js
+++ b/spec/frontend/cycle_analytics/base_spec.js
@@ -5,62 +5,89 @@ import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import BaseComponent from '~/cycle_analytics/components/base.vue';
import PathNavigation from '~/cycle_analytics/components/path_navigation.vue';
+import StageTable from '~/cycle_analytics/components/stage_table.vue';
+import ValueStreamMetrics from '~/cycle_analytics/components/value_stream_metrics.vue';
+import { NOT_ENOUGH_DATA_ERROR } from '~/cycle_analytics/constants';
import initState from '~/cycle_analytics/store/state';
-import { selectedStage, convertedEvents as selectedStageEvents } from './mock_data';
-
+import {
+ permissions,
+ transformedProjectStagePathData,
+ selectedStage,
+ issueEvents,
+ createdBefore,
+ createdAfter,
+ currentGroup,
+ stageCounts,
+} from './mock_data';
+
+const selectedStageEvents = issueEvents.events;
const noDataSvgPath = 'path/to/no/data';
const noAccessSvgPath = 'path/to/no/access';
+const selectedStageCount = stageCounts[selectedStage.id];
+const fullPath = 'full/path/to/foo';
Vue.use(Vuex);
let wrapper;
-function createStore({ initialState = {} }) {
+const defaultState = {
+ permissions,
+ currentGroup,
+ createdBefore,
+ createdAfter,
+ stageCounts,
+ endpoints: { fullPath },
+};
+
+function createStore({ initialState = {}, initialGetters = {} }) {
return new Vuex.Store({
state: {
...initState(),
- permissions: {
- [selectedStage.id]: true,
- },
+ ...defaultState,
...initialState,
},
getters: {
- pathNavigationData: () => [],
+ pathNavigationData: () => transformedProjectStagePathData,
+ filterParams: () => ({
+ created_after: createdAfter,
+ created_before: createdBefore,
+ }),
+ ...initialGetters,
},
});
}
-function createComponent({ initialState } = {}) {
+function createComponent({ initialState, initialGetters } = {}) {
return extendedWrapper(
shallowMount(BaseComponent, {
- store: createStore({ initialState }),
+ store: createStore({ initialState, initialGetters }),
propsData: {
noDataSvgPath,
noAccessSvgPath,
},
+ stubs: {
+ StageTable,
+ },
}),
);
}
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findPathNavigation = () => wrapper.findComponent(PathNavigation);
-const findOverviewMetrics = () => wrapper.findByTestId('vsa-stage-overview-metrics');
-const findStageTable = () => wrapper.findByTestId('vsa-stage-table');
-const findEmptyStage = () => wrapper.findComponent(GlEmptyState);
-const findStageEvents = () => wrapper.findByTestId('stage-table-events');
+const findOverviewMetrics = () => wrapper.findComponent(ValueStreamMetrics);
+const findStageTable = () => wrapper.findComponent(StageTable);
+const findStageEvents = () => findStageTable().props('stageEvents');
+const findEmptyStageTitle = () => wrapper.findComponent(GlEmptyState).props('title');
+
+const hasMetricsRequests = (reqs) => {
+ const foundReqs = findOverviewMetrics().props('requests');
+ expect(foundReqs.length).toEqual(reqs.length);
+ expect(foundReqs.map(({ name }) => name)).toEqual(reqs);
+};
describe('Value stream analytics component', () => {
beforeEach(() => {
- wrapper = createComponent({
- initialState: {
- isLoading: false,
- isLoadingStage: false,
- isEmptyStage: false,
- selectedStageEvents,
- selectedStage,
- selectedStageError: '',
- },
- });
+ wrapper = createComponent({ initialState: { selectedStage, selectedStageEvents } });
});
afterEach(() => {
@@ -72,23 +99,44 @@ describe('Value stream analytics component', () => {
expect(findPathNavigation().exists()).toBe(true);
});
+ it('receives the stages formatted for the path navigation', () => {
+ expect(findPathNavigation().props('stages')).toBe(transformedProjectStagePathData);
+ });
+
it('renders the overview metrics', () => {
expect(findOverviewMetrics().exists()).toBe(true);
});
+ it('passes requests prop to the metrics component', () => {
+ hasMetricsRequests(['recent activity']);
+ });
+
it('renders the stage table', () => {
expect(findStageTable().exists()).toBe(true);
});
+ it('passes the selected stage count to the stage table', () => {
+ expect(findStageTable().props('stageCount')).toBe(selectedStageCount);
+ });
+
it('renders the stage table events', () => {
- expect(findEmptyStage().exists()).toBe(false);
- expect(findStageEvents().exists()).toBe(true);
+ expect(findStageEvents()).toEqual(selectedStageEvents);
});
it('does not render the loading icon', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
+ describe('with `cycleAnalyticsForGroups=true` license', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ initialState: { features: { cycleAnalyticsForGroups: true } } });
+ });
+
+ it('passes requests prop to the metrics component', () => {
+ hasMetricsRequests(['time summary', 'recent activity']);
+ });
+ });
+
describe('isLoading = true', () => {
beforeEach(() => {
wrapper = createComponent({
@@ -97,17 +145,17 @@ describe('Value stream analytics component', () => {
});
it('renders the path navigation component with prop `loading` set to true', () => {
- expect(findPathNavigation().html()).toMatchSnapshot();
- });
-
- it('does not render the overview metrics', () => {
- expect(findOverviewMetrics().exists()).toBe(false);
+ expect(findPathNavigation().props('loading')).toBe(true);
});
it('does not render the stage table', () => {
expect(findStageTable().exists()).toBe(false);
});
+ it('renders the overview metrics', () => {
+ expect(findOverviewMetrics().exists()).toBe(true);
+ });
+
it('renders the loading icon', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
@@ -125,32 +173,37 @@ describe('Value stream analytics component', () => {
expect(tableWrapper.exists()).toBe(true);
expect(tableWrapper.find(GlLoadingIcon).exists()).toBe(true);
});
+
+ it('renders the path navigation loading state', () => {
+ expect(findPathNavigation().props('loading')).toBe(true);
+ });
});
describe('isEmptyStage = true', () => {
+ const emptyStageParams = {
+ isEmptyStage: true,
+ selectedStage: { ...selectedStage, emptyStageText: 'This stage is empty' },
+ };
beforeEach(() => {
- wrapper = createComponent({
- initialState: { selectedStage, isEmptyStage: true },
- });
+ wrapper = createComponent({ initialState: emptyStageParams });
});
it('renders the empty stage with `Not enough data` message', () => {
- expect(findEmptyStage().html()).toMatchSnapshot();
+ expect(findEmptyStageTitle()).toBe(NOT_ENOUGH_DATA_ERROR);
});
describe('with a selectedStageError', () => {
beforeEach(() => {
wrapper = createComponent({
initialState: {
- selectedStage,
- isEmptyStage: true,
+ ...emptyStageParams,
selectedStageError: 'There is too much data to calculate',
},
});
});
it('renders the empty stage with `There is too much data to calculate` message', () => {
- expect(findEmptyStage().html()).toMatchSnapshot();
+ expect(findEmptyStageTitle()).toBe('There is too much data to calculate');
});
});
});
@@ -159,21 +212,24 @@ describe('Value stream analytics component', () => {
beforeEach(() => {
wrapper = createComponent({
initialState: {
+ selectedStage,
permissions: {
+ ...permissions,
[selectedStage.id]: false,
},
},
});
});
- it('renders the empty stage with `You need permission` message', () => {
- expect(findEmptyStage().html()).toMatchSnapshot();
+ it('renders the empty stage with `You need permission.` message', () => {
+ expect(findEmptyStageTitle()).toBe('You need permission.');
});
});
describe('without a selected stage', () => {
beforeEach(() => {
wrapper = createComponent({
+ initialGetters: { pathNavigationData: () => [] },
initialState: { selectedStage: null, isEmptyStage: true },
});
});
@@ -182,12 +238,12 @@ describe('Value stream analytics component', () => {
expect(findStageTable().exists()).toBe(true);
});
- it('does not render the path navigation component', () => {
+ it('does not render the path navigation', () => {
expect(findPathNavigation().exists()).toBe(false);
});
it('does not render the stage table events', () => {
- expect(findStageEvents().exists()).toBe(false);
+ expect(findStageEvents()).toHaveLength(0);
});
it('does not render the loading icon', () => {
diff --git a/spec/frontend/cycle_analytics/mock_data.js b/spec/frontend/cycle_analytics/mock_data.js
index 4e6471d5f7b..d9659d5d4c3 100644
--- a/spec/frontend/cycle_analytics/mock_data.js
+++ b/spec/frontend/cycle_analytics/mock_data.js
@@ -1,3 +1,4 @@
+import { getJSONFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
import { DEFAULT_VALUE_STREAM, DEFAULT_DAYS_IN_PAST } from '~/cycle_analytics/constants';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
@@ -6,11 +7,33 @@ import { getDateInPast } from '~/lib/utils/datetime_utility';
export const createdBefore = new Date(2019, 0, 14);
export const createdAfter = getDateInPast(createdBefore, DEFAULT_DAYS_IN_PAST);
+export const deepCamelCase = (obj) => convertObjectPropsToCamelCase(obj, { deep: true });
+
export const getStageByTitle = (stages, title) =>
stages.find((stage) => stage.title && stage.title.toLowerCase().trim() === title) || {};
+const fixtureEndpoints = {
+ customizableCycleAnalyticsStagesAndEvents: 'projects/analytics/value_stream_analytics/stages',
+ stageEvents: (stage) => `projects/analytics/value_stream_analytics/events/${stage}`,
+ metricsData: 'projects/analytics/value_stream_analytics/summary',
+};
+
+export const metricsData = getJSONFixture(fixtureEndpoints.metricsData);
+
+export const customizableStagesAndEvents = getJSONFixture(
+ fixtureEndpoints.customizableCycleAnalyticsStagesAndEvents,
+);
+
export const defaultStages = ['issue', 'plan', 'review', 'code', 'test', 'staging'];
+const stageFixtures = defaultStages.reduce((acc, stage) => {
+ const events = getJSONFixture(fixtureEndpoints.stageEvents(stage));
+ return {
+ ...acc,
+ [stage]: events,
+ };
+}, {});
+
export const summary = [
{ value: '20', title: 'New Issues' },
{ value: null, title: 'Commits' },
@@ -18,7 +41,7 @@ export const summary = [
{ value: null, title: 'Deployment Frequency', unit: 'per day' },
];
-const issueStage = {
+export const issueStage = {
id: 'issue',
title: 'Issue',
name: 'issue',
@@ -27,7 +50,7 @@ const issueStage = {
value: null,
};
-const planStage = {
+export const planStage = {
id: 'plan',
title: 'Plan',
name: 'plan',
@@ -36,7 +59,7 @@ const planStage = {
value: 75600,
};
-const codeStage = {
+export const codeStage = {
id: 'code',
title: 'Code',
name: 'code',
@@ -45,7 +68,7 @@ const codeStage = {
value: 172800,
};
-const testStage = {
+export const testStage = {
id: 'test',
title: 'Test',
name: 'test',
@@ -54,7 +77,7 @@ const testStage = {
value: 17550,
};
-const reviewStage = {
+export const reviewStage = {
id: 'review',
title: 'Review',
name: 'review',
@@ -63,7 +86,7 @@ const reviewStage = {
value: null,
};
-const stagingStage = {
+export const stagingStage = {
id: 'staging',
title: 'Staging',
name: 'staging',
@@ -79,7 +102,7 @@ export const selectedStage = {
isUserAllowed: true,
emptyStageText:
'The issue stage shows the time it takes from creating an issue to assigning the issue to a milestone, or add the issue to a list on your Issue Board. Begin creating issues to see data for this stage.',
- component: 'stage-issue-component',
+
slug: 'issue',
};
@@ -109,53 +132,30 @@ export const convertedData = {
],
};
-export const rawEvents = [
- {
- title: 'Brockfunc-1617160796',
- author: {
- id: 275,
- name: 'VSM User4',
- username: 'vsm-user-4-1617160796',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/6a6f5480ae582ba68982a34169420747?s=80&d=identicon',
- web_url: 'http://gdk.test:3001/vsm-user-4-1617160796',
- show_status: false,
- path: '/vsm-user-4-1617160796',
- },
- iid: '16',
- total_time: { days: 1, hours: 9 },
- created_at: 'about 1 month ago',
- url: 'http://gdk.test:3001/vsa-life/ror-project-vsa/-/issues/16',
- short_sha: 'some_sha',
- commit_url: 'some_commit_url',
- },
- {
- title: 'Subpod-1617160796',
- author: {
- id: 274,
- name: 'VSM User3',
- username: 'vsm-user-3-1617160796',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/fde853fc3ab7dc552e649dcb4fcf5f7f?s=80&d=identicon',
- web_url: 'http://gdk.test:3001/vsm-user-3-1617160796',
- show_status: false,
- path: '/vsm-user-3-1617160796',
- },
- iid: '20',
- total_time: { days: 2, hours: 18 },
- created_at: 'about 1 month ago',
- url: 'http://gdk.test:3001/vsa-life/ror-project-vsa/-/issues/20',
- },
-];
-
-export const convertedEvents = rawEvents.map((ev) =>
- convertObjectPropsToCamelCase(ev, { deep: true }),
-);
+export const rawIssueEvents = stageFixtures.issue;
+export const issueEvents = deepCamelCase(rawIssueEvents);
+export const reviewEvents = deepCamelCase(stageFixtures.review);
export const pathNavIssueMetric = 172800;
+export const rawStageCounts = [
+ { id: 'issue', count: 6 },
+ { id: 'plan', count: 6 },
+ { id: 'code', count: 1 },
+ { id: 'test', count: 5 },
+ { id: 'review', count: 12 },
+ { id: 'staging', count: 3 },
+];
+
+export const stageCounts = {
+ code: 1,
+ issue: 6,
+ plan: 6,
+ review: 12,
+ staging: 3,
+ test: 5,
+};
+
export const rawStageMedians = [
{ id: 'issue', value: 172800 },
{ id: 'plan', value: 86400 },
@@ -189,7 +189,7 @@ export const transformedProjectStagePathData = [
{
metric: 172800,
selected: true,
- stageCount: undefined,
+ stageCount: 6,
icon: null,
id: 'issue',
title: 'Issue',
@@ -201,7 +201,7 @@ export const transformedProjectStagePathData = [
{
metric: 86400,
selected: false,
- stageCount: undefined,
+ stageCount: 6,
icon: null,
id: 'plan',
title: 'Plan',
@@ -213,7 +213,7 @@ export const transformedProjectStagePathData = [
{
metric: 129600,
selected: false,
- stageCount: undefined,
+ stageCount: 1,
icon: null,
id: 'code',
title: 'Code',
@@ -251,46 +251,8 @@ export const selectedProjects = [
},
];
-export const rawValueStreamStages = [
- {
- title: 'Issue',
- hidden: false,
- legend: '',
- description: 'Time before an issue gets scheduled',
- id: 'issue',
- custom: false,
- start_event_html_description:
- '\u003cp data-sourcepos="1:1-1:13" dir="auto"\u003eIssue created\u003c/p\u003e',
- end_event_html_description:
- '\u003cp data-sourcepos="1:1-1:71" dir="auto"\u003eIssue first associated with a milestone or issue first added to a board\u003c/p\u003e',
- },
- {
- title: 'Plan',
- hidden: false,
- legend: '',
- description: 'Time before an issue starts implementation',
- id: 'plan',
- custom: false,
- start_event_html_description:
- '\u003cp data-sourcepos="1:1-1:71" dir="auto"\u003eIssue first associated with a milestone or issue first added to a board\u003c/p\u003e',
- end_event_html_description:
- '\u003cp data-sourcepos="1:1-1:33" dir="auto"\u003eIssue first mentioned in a commit\u003c/p\u003e',
- },
- {
- title: 'Code',
- hidden: false,
- legend: '',
- description: 'Time until first merge request',
- id: 'code',
- custom: false,
- start_event_html_description:
- '\u003cp data-sourcepos="1:1-1:33" dir="auto"\u003eIssue first mentioned in a commit\u003c/p\u003e',
- end_event_html_description:
- '\u003cp data-sourcepos="1:1-1:21" dir="auto"\u003eMerge request created\u003c/p\u003e',
- },
-];
+export const rawValueStreamStages = customizableStagesAndEvents.stages;
-export const valueStreamStages = rawValueStreamStages.map((s) => ({
- ...convertObjectPropsToCamelCase(s, { deep: true }),
- component: `stage-${s.id}-component`,
-}));
+export const valueStreamStages = rawValueStreamStages.map((s) =>
+ convertObjectPropsToCamelCase(s, { deep: true }),
+);
diff --git a/spec/frontend/cycle_analytics/stage_nav_item_spec.js b/spec/frontend/cycle_analytics/stage_nav_item_spec.js
deleted file mode 100644
index d577d0b602a..00000000000
--- a/spec/frontend/cycle_analytics/stage_nav_item_spec.js
+++ /dev/null
@@ -1,152 +0,0 @@
-import { mount, shallowMount } from '@vue/test-utils';
-import StageNavItem from '~/cycle_analytics/components/stage_nav_item.vue';
-
-describe('StageNavItem', () => {
- let wrapper = null;
- const title = 'Cool stage';
- const value = '1 day';
-
- function createComponent(props, shallow = true) {
- const func = shallow ? shallowMount : mount;
- return func(StageNavItem, {
- propsData: {
- isActive: false,
- isUserAllowed: false,
- isDefaultStage: true,
- title,
- value,
- ...props,
- },
- });
- }
-
- function hasStageName() {
- const stageName = wrapper.find('.stage-name');
- expect(stageName.exists()).toBe(true);
- expect(stageName.text()).toEqual(title);
- }
-
- it('renders stage name', () => {
- wrapper = createComponent({ isUserAllowed: true });
- hasStageName();
- wrapper.destroy();
- });
-
- describe('User has access', () => {
- describe('with a value', () => {
- beforeEach(() => {
- wrapper = createComponent({ isUserAllowed: true });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
- it('renders the value for median value', () => {
- expect(wrapper.find('.stage-empty').exists()).toBe(false);
- expect(wrapper.find('.not-available').exists()).toBe(false);
- expect(wrapper.find('.stage-median').text()).toEqual(value);
- });
- });
-
- describe('without a value', () => {
- beforeEach(() => {
- wrapper = createComponent({ isUserAllowed: true, value: null });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('has the stage-empty class', () => {
- expect(wrapper.find('.stage-empty').exists()).toBe(true);
- });
-
- it('renders Not enough data for the median value', () => {
- expect(wrapper.find('.stage-median').text()).toEqual('Not enough data');
- });
- });
- });
-
- describe('is active', () => {
- beforeEach(() => {
- wrapper = createComponent({ isActive: true }, false);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
- it('has the active class', () => {
- expect(wrapper.find('.stage-nav-item').classes('active')).toBe(true);
- });
- });
-
- describe('is not active', () => {
- beforeEach(() => {
- wrapper = createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
- it('emits the `select` event when clicked', () => {
- expect(wrapper.emitted().select).toBeUndefined();
- wrapper.trigger('click');
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.emitted().select.length).toBe(1);
- });
- });
- });
-
- describe('User does not have access', () => {
- beforeEach(() => {
- wrapper = createComponent({ isUserAllowed: false }, false);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
- it('renders stage name', () => {
- hasStageName();
- });
-
- it('has class not-available', () => {
- expect(wrapper.find('.stage-empty').exists()).toBe(false);
- expect(wrapper.find('.not-available').exists()).toBe(true);
- });
-
- it('renders Not available for the median value', () => {
- expect(wrapper.find('.stage-median').text()).toBe('Not available');
- });
- it('does not render options menu', () => {
- expect(wrapper.find('[data-testid="more-actions-toggle"]').exists()).toBe(false);
- });
- });
-
- describe('User can edit stages', () => {
- beforeEach(() => {
- wrapper = createComponent({ isUserAllowed: true }, false);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
- it('renders stage name', () => {
- hasStageName();
- });
-
- it('does not render options menu', () => {
- expect(wrapper.find('[data-testid="more-actions-toggle"]').exists()).toBe(false);
- });
-
- it('can not edit the stage', () => {
- expect(wrapper.text()).not.toContain('Edit stage');
- });
- it('can not remove the stage', () => {
- expect(wrapper.text()).not.toContain('Remove stage');
- });
-
- it('can not hide the stage', () => {
- expect(wrapper.text()).not.toContain('Hide stage');
- });
- });
-});
diff --git a/spec/frontend/cycle_analytics/stage_table_spec.js b/spec/frontend/cycle_analytics/stage_table_spec.js
new file mode 100644
index 00000000000..47a2ce4444b
--- /dev/null
+++ b/spec/frontend/cycle_analytics/stage_table_spec.js
@@ -0,0 +1,279 @@
+import { GlEmptyState, GlLoadingIcon, GlTable } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import StageTable from '~/cycle_analytics/components/stage_table.vue';
+import { PAGINATION_SORT_FIELD_DURATION } from '~/cycle_analytics/constants';
+import { issueEvents, issueStage, reviewStage, reviewEvents } from './mock_data';
+
+let wrapper = null;
+let trackingSpy = null;
+
+const noDataSvgPath = 'path/to/no/data';
+const emptyStateTitle = 'Too much data';
+const notEnoughDataError = "We don't have enough data to show this stage.";
+const issueEventItems = issueEvents.events;
+const reviewEventItems = reviewEvents.events;
+const [firstIssueEvent] = issueEventItems;
+const [firstReviewEvent] = reviewEventItems;
+const pagination = { page: 1, hasNextPage: true };
+
+const findStageEvents = () => wrapper.findAllByTestId('vsa-stage-event');
+const findPagination = () => wrapper.findByTestId('vsa-stage-pagination');
+const findTable = () => wrapper.findComponent(GlTable);
+const findTableHead = () => wrapper.find('thead');
+const findStageEventTitle = (ev) => extendedWrapper(ev).findByTestId('vsa-stage-event-title');
+const findStageTime = () => wrapper.findByTestId('vsa-stage-event-time');
+const findIcon = (name) => wrapper.findByTestId(`${name}-icon`);
+
+function createComponent(props = {}, shallow = false) {
+ const func = shallow ? shallowMount : mount;
+ return extendedWrapper(
+ func(StageTable, {
+ propsData: {
+ isLoading: false,
+ stageEvents: issueEventItems,
+ noDataSvgPath,
+ selectedStage: issueStage,
+ pagination,
+ ...props,
+ },
+ stubs: {
+ GlLoadingIcon,
+ GlEmptyState,
+ },
+ }),
+ );
+}
+
+describe('StageTable', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('is loaded with data', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('will render the correct events', () => {
+ const evs = findStageEvents();
+ expect(evs).toHaveLength(issueEventItems.length);
+
+ const titles = evs.wrappers.map((ev) => findStageEventTitle(ev).text());
+ issueEventItems.forEach((ev, index) => {
+ expect(titles[index]).toBe(ev.title);
+ });
+ });
+
+ it('will not display the default data message', () => {
+ expect(wrapper.html()).not.toContain(notEnoughDataError);
+ });
+ });
+
+ describe('with minimal stage data', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ currentStage: { title: 'New stage title' } });
+ });
+
+ it('will render the correct events', () => {
+ const evs = findStageEvents();
+ expect(evs).toHaveLength(issueEventItems.length);
+
+ const titles = evs.wrappers.map((ev) => findStageEventTitle(ev).text());
+ issueEventItems.forEach((ev, index) => {
+ expect(titles[index]).toBe(ev.title);
+ });
+ });
+ });
+
+ describe('default event', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ stageEvents: [{ ...firstIssueEvent }],
+ selectedStage: { ...issueStage, custom: false },
+ });
+ });
+
+ it('will render the event title', () => {
+ expect(wrapper.findByTestId('vsa-stage-event-title').text()).toBe(firstIssueEvent.title);
+ });
+
+ it('will set the workflow title to "Issues"', () => {
+ expect(findTableHead().text()).toContain('Issues');
+ });
+
+ it('does not render the fork icon', () => {
+ expect(findIcon('fork').exists()).toBe(false);
+ });
+
+ it('does not render the branch icon', () => {
+ expect(findIcon('commit').exists()).toBe(false);
+ });
+
+ it('will render the total time', () => {
+ const createdAt = firstIssueEvent.createdAt.replace(' ago', '');
+ expect(findStageTime().text()).toBe(createdAt);
+ });
+
+ it('will render the author', () => {
+ expect(wrapper.findByTestId('vsa-stage-event-author').text()).toContain(
+ firstIssueEvent.author.name,
+ );
+ });
+
+ it('will render the created at date', () => {
+ expect(wrapper.findByTestId('vsa-stage-event-date').text()).toContain(
+ firstIssueEvent.createdAt,
+ );
+ });
+ });
+
+ describe('merge request event', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ stageEvents: [{ ...firstReviewEvent }],
+ selectedStage: { ...reviewStage, custom: false },
+ });
+ });
+
+ it('will set the workflow title to "Merge requests"', () => {
+ expect(findTableHead().text()).toContain('Merge requests');
+ expect(findTableHead().text()).not.toContain('Issues');
+ });
+ });
+
+ describe('isLoading = true', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ isLoading: true }, true);
+ });
+
+ it('will display the loading icon', () => {
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ it('will not display pagination', () => {
+ expect(findPagination().exists()).toBe(false);
+ });
+ });
+
+ describe('with no stageEvents', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ stageEvents: [] });
+ });
+
+ it('will render the empty state', () => {
+ expect(wrapper.findComponent(GlEmptyState).exists()).toBe(true);
+ });
+
+ it('will display the default no data message', () => {
+ expect(wrapper.html()).toContain(notEnoughDataError);
+ });
+
+ it('will not display the pagination component', () => {
+ expect(findPagination().exists()).toBe(false);
+ });
+ });
+
+ describe('emptyStateTitle set', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ stageEvents: [], emptyStateTitle });
+ });
+
+ it('will display the custom message', () => {
+ expect(wrapper.html()).not.toContain(notEnoughDataError);
+ expect(wrapper.html()).toContain(emptyStateTitle);
+ });
+ });
+
+ describe('Pagination', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ wrapper.destroy();
+ });
+
+ it('will display the pagination component', () => {
+ expect(findPagination().exists()).toBe(true);
+ });
+
+ it('clicking prev or next will emit an event', async () => {
+ expect(wrapper.emitted('handleUpdatePagination')).toBeUndefined();
+
+ findPagination().vm.$emit('input', 2);
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.emitted('handleUpdatePagination')[0]).toEqual([{ page: 2 }]);
+ });
+
+ it('clicking prev or next will send tracking information', () => {
+ findPagination().vm.$emit('input', 2);
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_button', { label: 'pagination' });
+ });
+
+ describe('with `hasNextPage=false', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ pagination: { page: 1, hasNextPage: false } });
+ });
+
+ it('will not display the pagination component', () => {
+ expect(findPagination().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('Sorting', () => {
+ const triggerTableSort = (sortDesc = true) =>
+ findTable().vm.$emit('sort-changed', {
+ sortBy: PAGINATION_SORT_FIELD_DURATION,
+ sortDesc,
+ });
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ wrapper.destroy();
+ });
+
+ it('clicking a table column will send tracking information', () => {
+ triggerTableSort();
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_button', {
+ label: 'sort_duration_desc',
+ });
+ });
+
+ it('clicking a table column will update the sort field', () => {
+ expect(wrapper.emitted('handleUpdatePagination')).toBeUndefined();
+ triggerTableSort();
+
+ expect(wrapper.emitted('handleUpdatePagination')[0]).toEqual([
+ {
+ direction: 'desc',
+ sort: 'duration',
+ },
+ ]);
+ });
+
+ it('with sortDesc=false will toggle the direction field', async () => {
+ expect(wrapper.emitted('handleUpdatePagination')).toBeUndefined();
+ triggerTableSort(false);
+
+ expect(wrapper.emitted('handleUpdatePagination')[0]).toEqual([
+ {
+ direction: 'asc',
+ sort: 'duration',
+ },
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/cycle_analytics/store/actions_spec.js b/spec/frontend/cycle_analytics/store/actions_spec.js
index 8a8dd374f8e..915a828ff19 100644
--- a/spec/frontend/cycle_analytics/store/actions_spec.js
+++ b/spec/frontend/cycle_analytics/store/actions_spec.js
@@ -2,39 +2,23 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/cycle_analytics/store/actions';
+import * as getters from '~/cycle_analytics/store/getters';
import httpStatusCodes from '~/lib/utils/http_status';
import { allowedStages, selectedStage, selectedValueStream } from '../mock_data';
const mockRequestPath = 'some/cool/path';
const mockFullPath = '/namespace/-/analytics/value_stream_analytics/value_streams';
const mockStartDate = 30;
-const mockRequestedDataActions = ['fetchValueStreams', 'fetchCycleAnalyticsData'];
-const mockInitializeActionCommit = {
- payload: { requestPath: mockRequestPath },
- type: 'INITIALIZE_VSA',
-};
+const mockEndpoints = { fullPath: mockFullPath, requestPath: mockRequestPath };
const mockSetDateActionCommit = { payload: { startDate: mockStartDate }, type: 'SET_DATE_RANGE' };
-const mockRequestedDataMutations = [
- {
- payload: true,
- type: 'SET_LOADING',
- },
- {
- payload: false,
- type: 'SET_LOADING',
- },
-];
-
-const features = {
- cycleAnalyticsForGroups: true,
-};
+
+const defaultState = { ...getters, selectedValueStream };
describe('Project Value Stream Analytics actions', () => {
let state;
let mock;
beforeEach(() => {
- state = {};
mock = new MockAdapter(axios);
});
@@ -45,28 +29,62 @@ describe('Project Value Stream Analytics actions', () => {
const mutationTypes = (arr) => arr.map(({ type }) => type);
+ const mockFetchStageDataActions = [
+ { type: 'setLoading', payload: true },
+ { type: 'fetchCycleAnalyticsData' },
+ { type: 'fetchStageData' },
+ { type: 'fetchStageMedians' },
+ { type: 'setLoading', payload: false },
+ ];
+
describe.each`
- action | payload | expectedActions | expectedMutations
- ${'initializeVsa'} | ${{ requestPath: mockRequestPath }} | ${mockRequestedDataActions} | ${[mockInitializeActionCommit, ...mockRequestedDataMutations]}
- ${'setDateRange'} | ${{ startDate: mockStartDate }} | ${mockRequestedDataActions} | ${[mockSetDateActionCommit, ...mockRequestedDataMutations]}
- ${'setSelectedStage'} | ${{ selectedStage }} | ${['fetchStageData']} | ${[{ type: 'SET_SELECTED_STAGE', payload: { selectedStage } }]}
- ${'setSelectedValueStream'} | ${{ selectedValueStream }} | ${['fetchValueStreamStages']} | ${[{ type: 'SET_SELECTED_VALUE_STREAM', payload: { selectedValueStream } }]}
+ action | payload | expectedActions | expectedMutations
+ ${'setLoading'} | ${true} | ${[]} | ${[{ type: 'SET_LOADING', payload: true }]}
+ ${'setDateRange'} | ${{ startDate: mockStartDate }} | ${mockFetchStageDataActions} | ${[mockSetDateActionCommit]}
+ ${'setFilters'} | ${[]} | ${mockFetchStageDataActions} | ${[]}
+ ${'setSelectedStage'} | ${{ selectedStage }} | ${[{ type: 'fetchStageData' }]} | ${[{ type: 'SET_SELECTED_STAGE', payload: { selectedStage } }]}
+ ${'setSelectedValueStream'} | ${{ selectedValueStream }} | ${[{ type: 'fetchValueStreamStages' }, { type: 'fetchCycleAnalyticsData' }]} | ${[{ type: 'SET_SELECTED_VALUE_STREAM', payload: { selectedValueStream } }]}
`('$action', ({ action, payload, expectedActions, expectedMutations }) => {
const types = mutationTypes(expectedMutations);
-
it(`will dispatch ${expectedActions} and commit ${types}`, () =>
testAction({
action: actions[action],
state,
payload,
expectedMutations,
- expectedActions: expectedActions.map((a) => ({ type: a })),
+ expectedActions,
}));
});
+ describe('initializeVsa', () => {
+ let mockDispatch;
+ let mockCommit;
+ const payload = { endpoints: mockEndpoints };
+
+ beforeEach(() => {
+ mockDispatch = jest.fn(() => Promise.resolve());
+ mockCommit = jest.fn();
+ });
+
+ it('will dispatch the setLoading and fetchValueStreams actions and commit INITIALIZE_VSA', async () => {
+ await actions.initializeVsa(
+ {
+ ...state,
+ dispatch: mockDispatch,
+ commit: mockCommit,
+ },
+ payload,
+ );
+ expect(mockCommit).toHaveBeenCalledWith('INITIALIZE_VSA', { endpoints: mockEndpoints });
+ expect(mockDispatch).toHaveBeenCalledWith('setLoading', true);
+ expect(mockDispatch).toHaveBeenCalledWith('fetchValueStreams');
+ expect(mockDispatch).toHaveBeenCalledWith('setLoading', false);
+ });
+ });
+
describe('fetchCycleAnalyticsData', () => {
beforeEach(() => {
- state = { requestPath: mockRequestPath };
+ state = { endpoints: mockEndpoints };
mock = new MockAdapter(axios);
mock.onGet(mockRequestPath).reply(httpStatusCodes.OK);
});
@@ -85,7 +103,7 @@ describe('Project Value Stream Analytics actions', () => {
describe('with a failing request', () => {
beforeEach(() => {
- state = { requestPath: mockRequestPath };
+ state = { endpoints: mockEndpoints };
mock = new MockAdapter(axios);
mock.onGet(mockRequestPath).reply(httpStatusCodes.BAD_REQUEST);
});
@@ -105,11 +123,12 @@ describe('Project Value Stream Analytics actions', () => {
});
describe('fetchStageData', () => {
- const mockStagePath = `${mockRequestPath}/events/${selectedStage.name}`;
+ const mockStagePath = /value_streams\/\w+\/stages\/\w+\/records/;
beforeEach(() => {
state = {
- requestPath: mockRequestPath,
+ ...defaultState,
+ endpoints: mockEndpoints,
startDate: mockStartDate,
selectedStage,
};
@@ -131,7 +150,8 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
- requestPath: mockRequestPath,
+ ...defaultState,
+ endpoints: mockEndpoints,
startDate: mockStartDate,
selectedStage,
};
@@ -155,7 +175,8 @@ describe('Project Value Stream Analytics actions', () => {
describe('with a failing request', () => {
beforeEach(() => {
state = {
- requestPath: mockRequestPath,
+ ...defaultState,
+ endpoints: mockEndpoints,
startDate: mockStartDate,
selectedStage,
};
@@ -179,8 +200,7 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
- features,
- fullPath: mockFullPath,
+ endpoints: mockEndpoints,
};
mock = new MockAdapter(axios);
mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK);
@@ -196,29 +216,10 @@ describe('Project Value Stream Analytics actions', () => {
{ type: 'receiveValueStreamsSuccess' },
{ type: 'setSelectedStage' },
{ type: 'fetchStageMedians' },
+ { type: 'fetchStageCountValues' },
],
}));
- describe('with cycleAnalyticsForGroups=false', () => {
- beforeEach(() => {
- state = {
- features: { cycleAnalyticsForGroups: false },
- fullPath: mockFullPath,
- };
- mock = new MockAdapter(axios);
- mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK);
- });
-
- it("does not dispatch the 'fetchStageMedians' request", () =>
- testAction({
- action: actions.fetchValueStreams,
- state,
- payload: {},
- expectedMutations: [{ type: 'REQUEST_VALUE_STREAMS' }],
- expectedActions: [{ type: 'receiveValueStreamsSuccess' }, { type: 'setSelectedStage' }],
- }));
- });
-
describe('with a failing request', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -271,7 +272,7 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
- fullPath: mockFullPath,
+ endpoints: mockEndpoints,
selectedValueStream,
};
mock = new MockAdapter(axios);
@@ -364,4 +365,64 @@ describe('Project Value Stream Analytics actions', () => {
}));
});
});
+
+ describe('fetchStageCountValues', () => {
+ const mockValueStreamPath = /count/;
+ const stageCountsPayload = [
+ { id: 'issue', count: 1 },
+ { id: 'plan', count: 2 },
+ { id: 'code', count: 3 },
+ ];
+
+ const stageCountError = new Error(
+ `Request failed with status code ${httpStatusCodes.BAD_REQUEST}`,
+ );
+
+ beforeEach(() => {
+ state = {
+ fullPath: mockFullPath,
+ selectedValueStream,
+ stages: allowedStages,
+ };
+ mock = new MockAdapter(axios);
+ mock
+ .onGet(mockValueStreamPath)
+ .replyOnce(httpStatusCodes.OK, { count: 1 })
+ .onGet(mockValueStreamPath)
+ .replyOnce(httpStatusCodes.OK, { count: 2 })
+ .onGet(mockValueStreamPath)
+ .replyOnce(httpStatusCodes.OK, { count: 3 });
+ });
+
+ it(`commits the 'REQUEST_STAGE_COUNTS' and 'RECEIVE_STAGE_COUNTS_SUCCESS' mutations`, () =>
+ testAction({
+ action: actions.fetchStageCountValues,
+ state,
+ payload: {},
+ expectedMutations: [
+ { type: 'REQUEST_STAGE_COUNTS' },
+ { type: 'RECEIVE_STAGE_COUNTS_SUCCESS', payload: stageCountsPayload },
+ ],
+ expectedActions: [],
+ }));
+
+ describe('with a failing request', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onGet(mockValueStreamPath).reply(httpStatusCodes.BAD_REQUEST);
+ });
+
+ it(`commits the 'RECEIVE_STAGE_COUNTS_ERROR' mutation`, () =>
+ testAction({
+ action: actions.fetchStageCountValues,
+ state,
+ payload: {},
+ expectedMutations: [
+ { type: 'REQUEST_STAGE_COUNTS' },
+ { type: 'RECEIVE_STAGE_COUNTS_ERROR', payload: stageCountError },
+ ],
+ expectedActions: [],
+ }));
+ });
+ });
});
diff --git a/spec/frontend/cycle_analytics/store/getters_spec.js b/spec/frontend/cycle_analytics/store/getters_spec.js
index 5745e9d7902..c47a30a5f79 100644
--- a/spec/frontend/cycle_analytics/store/getters_spec.js
+++ b/spec/frontend/cycle_analytics/store/getters_spec.js
@@ -4,12 +4,13 @@ import {
stageMedians,
transformedProjectStagePathData,
selectedStage,
+ stageCounts,
} from '../mock_data';
describe('Value stream analytics getters', () => {
describe('pathNavigationData', () => {
it('returns the transformed data', () => {
- const state = { stages: allowedStages, medians: stageMedians, selectedStage };
+ const state = { stages: allowedStages, medians: stageMedians, selectedStage, stageCounts };
expect(getters.pathNavigationData(state)).toEqual(transformedProjectStagePathData);
});
});
diff --git a/spec/frontend/cycle_analytics/store/mutations_spec.js b/spec/frontend/cycle_analytics/store/mutations_spec.js
index 77b19280517..7fcfef98547 100644
--- a/spec/frontend/cycle_analytics/store/mutations_spec.js
+++ b/spec/frontend/cycle_analytics/store/mutations_spec.js
@@ -4,30 +4,29 @@ import * as types from '~/cycle_analytics/store/mutation_types';
import mutations from '~/cycle_analytics/store/mutations';
import {
selectedStage,
- rawEvents,
- convertedEvents,
- rawData,
- convertedData,
+ rawIssueEvents,
+ issueEvents,
selectedValueStream,
rawValueStreamStages,
valueStreamStages,
rawStageMedians,
formattedStageMedians,
+ rawStageCounts,
+ stageCounts,
} from '../mock_data';
let state;
+const rawEvents = rawIssueEvents.events;
+const convertedEvents = issueEvents.events;
const mockRequestPath = 'fake/request/path';
const mockCreatedAfter = '2020-06-18';
const mockCreatedBefore = '2020-07-18';
-const features = {
- cycleAnalyticsForGroups: true,
-};
describe('Project Value Stream Analytics mutations', () => {
useFakeDate(2020, 6, 18);
beforeEach(() => {
- state = { features };
+ state = {};
});
afterEach(() => {
@@ -58,26 +57,48 @@ describe('Project Value Stream Analytics mutations', () => {
${types.RECEIVE_STAGE_DATA_ERROR} | ${'isEmptyStage'} | ${true}
${types.REQUEST_STAGE_MEDIANS} | ${'medians'} | ${{}}
${types.RECEIVE_STAGE_MEDIANS_ERROR} | ${'medians'} | ${{}}
+ ${types.REQUEST_STAGE_COUNTS} | ${'stageCounts'} | ${{}}
+ ${types.RECEIVE_STAGE_COUNTS_ERROR} | ${'stageCounts'} | ${{}}
`('$mutation will set $stateKey to $value', ({ mutation, stateKey, value }) => {
- mutations[mutation](state, {});
+ mutations[mutation](state);
expect(state).toMatchObject({ [stateKey]: value });
});
+ const mockInitialPayload = {
+ endpoints: { requestPath: mockRequestPath },
+ currentGroup: { title: 'cool-group' },
+ id: 1337,
+ };
+ const mockInitializedObj = {
+ endpoints: { requestPath: mockRequestPath },
+ createdAfter: mockCreatedAfter,
+ createdBefore: mockCreatedBefore,
+ };
+
it.each`
- mutation | payload | stateKey | value
- ${types.INITIALIZE_VSA} | ${{ requestPath: mockRequestPath }} | ${'requestPath'} | ${mockRequestPath}
- ${types.SET_DATE_RANGE} | ${{ startDate: DEFAULT_DAYS_TO_DISPLAY }} | ${'startDate'} | ${DEFAULT_DAYS_TO_DISPLAY}
- ${types.SET_DATE_RANGE} | ${{ startDate: DEFAULT_DAYS_TO_DISPLAY }} | ${'createdAfter'} | ${mockCreatedAfter}
- ${types.SET_DATE_RANGE} | ${{ startDate: DEFAULT_DAYS_TO_DISPLAY }} | ${'createdBefore'} | ${mockCreatedBefore}
- ${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
- ${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
- ${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
- ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'summary'} | ${convertedData.summary}
- ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
- ${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages}
- ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
- ${types.RECEIVE_STAGE_MEDIANS_SUCCESS} | ${rawStageMedians} | ${'medians'} | ${formattedStageMedians}
+ mutation | stateKey | value
+ ${types.INITIALIZE_VSA} | ${'endpoints'} | ${{ requestPath: mockRequestPath }}
+ ${types.INITIALIZE_VSA} | ${'createdAfter'} | ${mockCreatedAfter}
+ ${types.INITIALIZE_VSA} | ${'createdBefore'} | ${mockCreatedBefore}
+ `('$mutation will set $stateKey', ({ mutation, stateKey, value }) => {
+ mutations[mutation](state, { ...mockInitialPayload });
+
+ expect(state).toMatchObject({ ...mockInitializedObj, [stateKey]: value });
+ });
+
+ it.each`
+ mutation | payload | stateKey | value
+ ${types.SET_DATE_RANGE} | ${DEFAULT_DAYS_TO_DISPLAY} | ${'daysInPast'} | ${DEFAULT_DAYS_TO_DISPLAY}
+ ${types.SET_DATE_RANGE} | ${DEFAULT_DAYS_TO_DISPLAY} | ${'createdAfter'} | ${mockCreatedAfter}
+ ${types.SET_DATE_RANGE} | ${DEFAULT_DAYS_TO_DISPLAY} | ${'createdBefore'} | ${mockCreatedBefore}
+ ${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
+ ${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
+ ${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
+ ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
+ ${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages}
+ ${types.RECEIVE_STAGE_MEDIANS_SUCCESS} | ${rawStageMedians} | ${'medians'} | ${formattedStageMedians}
+ ${types.RECEIVE_STAGE_COUNTS_SUCCESS} | ${rawStageCounts} | ${'stageCounts'} | ${stageCounts}
`(
'$mutation with $payload will set $stateKey to $value',
({ mutation, payload, stateKey, value }) => {
@@ -95,41 +116,10 @@ describe('Project Value Stream Analytics mutations', () => {
});
it.each`
- mutation | payload | stateKey | value
- ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${{ events: [] }} | ${'isEmptyStage'} | ${true}
- ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${{ events: rawEvents }} | ${'selectedStageEvents'} | ${convertedEvents}
- ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${{ events: rawEvents }} | ${'isEmptyStage'} | ${false}
- `(
- '$mutation with $payload will set $stateKey to $value',
- ({ mutation, payload, stateKey, value }) => {
- mutations[mutation](state, payload);
-
- expect(state).toMatchObject({ [stateKey]: value });
- },
- );
- });
-
- describe('with cycleAnalyticsForGroups=false', () => {
- useFakeDate(2020, 6, 18);
-
- beforeEach(() => {
- state = { features: { cycleAnalyticsForGroups: false } };
- });
-
- const formattedMedians = {
- code: '2d',
- issue: '-',
- plan: '21h',
- review: '-',
- staging: '2d',
- test: '4h',
- };
-
- it.each`
- mutation | payload | stateKey | value
- ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'medians'} | ${formattedMedians}
- ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${{}} | ${'medians'} | ${{}}
- ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${{}} | ${'medians'} | ${{}}
+ mutation | payload | stateKey | value
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${[]} | ${'isEmptyStage'} | ${true}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${rawEvents} | ${'selectedStageEvents'} | ${convertedEvents}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${rawEvents} | ${'isEmptyStage'} | ${false}
`(
'$mutation with $payload will set $stateKey to $value',
({ mutation, payload, stateKey, value }) => {
diff --git a/spec/frontend/cycle_analytics/total_time_component_spec.js b/spec/frontend/cycle_analytics/total_time_component_spec.js
index e831bc311ed..9003c0330c0 100644
--- a/spec/frontend/cycle_analytics/total_time_component_spec.js
+++ b/spec/frontend/cycle_analytics/total_time_component_spec.js
@@ -1,11 +1,11 @@
-import { shallowMount } from '@vue/test-utils';
-import TotalTime from '~/cycle_analytics/components/total_time_component.vue';
+import { mount } from '@vue/test-utils';
+import TotalTimeComponent from '~/cycle_analytics/components/total_time_component.vue';
-describe('Total time component', () => {
- let wrapper;
+describe('TotalTimeComponent', () => {
+ let wrapper = null;
const createComponent = (propsData) => {
- wrapper = shallowMount(TotalTime, {
+ return mount(TotalTimeComponent, {
propsData,
});
};
@@ -14,45 +14,32 @@ describe('Total time component', () => {
wrapper.destroy();
});
- describe('With data', () => {
- it('should render information for days and hours', () => {
- createComponent({
- time: {
- days: 3,
- hours: 4,
- },
+ describe('with a valid time object', () => {
+ it.each`
+ time
+ ${{ seconds: 35 }}
+ ${{ mins: 47, seconds: 3 }}
+ ${{ days: 3, mins: 47, seconds: 3 }}
+ ${{ hours: 23, mins: 10 }}
+ ${{ hours: 7, mins: 20, seconds: 10 }}
+ `('with $time', ({ time }) => {
+ wrapper = createComponent({
+ time,
});
- expect(wrapper.text()).toMatchInterpolatedText('3 days 4 hrs');
- });
-
- it('should render information for hours and minutes', () => {
- createComponent({
- time: {
- hours: 4,
- mins: 35,
- },
- });
-
- expect(wrapper.text()).toMatchInterpolatedText('4 hrs 35 mins');
+ expect(wrapper.html()).toMatchSnapshot();
});
+ });
- it('should render information for seconds', () => {
- createComponent({
- time: {
- seconds: 45,
- },
+ describe('with a blank object', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ time: {},
});
-
- expect(wrapper.text()).toMatchInterpolatedText('45 s');
});
- });
-
- describe('Without data', () => {
- it('should render no information', () => {
- createComponent();
- expect(wrapper.text()).toBe('--');
+ it('should render --', () => {
+ expect(wrapper.html()).toMatchSnapshot();
});
});
});
diff --git a/spec/frontend/cycle_analytics/utils_spec.js b/spec/frontend/cycle_analytics/utils_spec.js
index 1fecdfc0539..69fed879fd8 100644
--- a/spec/frontend/cycle_analytics/utils_spec.js
+++ b/spec/frontend/cycle_analytics/utils_spec.js
@@ -1,70 +1,24 @@
import { useFakeDate } from 'helpers/fake_date';
import {
- decorateEvents,
- decorateData,
transformStagesForPathNavigation,
timeSummaryForPathNavigation,
medianTimeToParsedSeconds,
formatMedianValues,
filterStagesByHiddenStatus,
calculateFormattedDayInPast,
+ prepareTimeMetricsData,
} from '~/cycle_analytics/utils';
+import { slugify } from '~/lib/utils/text_utility';
import {
selectedStage,
- rawData,
- convertedData,
- rawEvents,
allowedStages,
stageMedians,
pathNavIssueMetric,
rawStageMedians,
+ metricsData,
} from './mock_data';
describe('Value stream analytics utils', () => {
- describe('decorateEvents', () => {
- const [result] = decorateEvents(rawEvents, selectedStage);
- const eventKeys = Object.keys(result);
- const authorKeys = Object.keys(result.author);
- it('will return the same number of events', () => {
- expect(decorateEvents(rawEvents, selectedStage).length).toBe(rawEvents.length);
- });
-
- it('will set all the required event fields', () => {
- ['totalTime', 'author', 'createdAt', 'shortSha', 'commitUrl'].forEach((key) => {
- expect(eventKeys).toContain(key);
- });
- ['webUrl', 'avatarUrl'].forEach((key) => {
- expect(authorKeys).toContain(key);
- });
- });
-
- it('will remove unused fields', () => {
- ['total_time', 'created_at', 'short_sha', 'commit_url'].forEach((key) => {
- expect(eventKeys).not.toContain(key);
- });
-
- ['web_url', 'avatar_url'].forEach((key) => {
- expect(authorKeys).not.toContain(key);
- });
- });
- });
-
- describe('decorateData', () => {
- const result = decorateData(rawData);
- it('returns the summary data', () => {
- expect(result.summary).toEqual(convertedData.summary);
- });
-
- it('returns `-` for summary data that has no value', () => {
- const singleSummaryResult = decorateData({
- stats: [],
- permissions: { issue: true },
- summary: [{ value: null, title: 'Commits' }],
- });
- expect(singleSummaryResult.summary).toEqual([{ value: '-', title: 'Commits' }]);
- });
- });
-
describe('transformStagesForPathNavigation', () => {
const stages = allowedStages;
const response = transformStagesForPathNavigation({
@@ -159,4 +113,32 @@ describe('Value stream analytics utils', () => {
expect(calculateFormattedDayInPast(5)).toEqual({ now: '1815-12-10', past: '1815-12-05' });
});
});
+
+ describe('prepareTimeMetricsData', () => {
+ let prepared;
+ const [first, second] = metricsData;
+ const firstKey = slugify(first.title);
+ const secondKey = slugify(second.title);
+
+ beforeEach(() => {
+ prepared = prepareTimeMetricsData([first, second], {
+ [firstKey]: { description: 'Is a value that is good' },
+ });
+ });
+
+ it('will add a `key` based on the title', () => {
+ expect(prepared).toMatchObject([{ key: firstKey }, { key: secondKey }]);
+ });
+
+ it('will add a `label` key', () => {
+ expect(prepared).toMatchObject([{ label: 'New Issues' }, { label: 'Commits' }]);
+ });
+
+ it('will add a popover description using the key if it is provided', () => {
+ expect(prepared).toMatchObject([
+ { description: 'Is a value that is good' },
+ { description: '' },
+ ]);
+ });
+ });
});
diff --git a/spec/frontend/cycle_analytics/value_stream_metrics_spec.js b/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
new file mode 100644
index 00000000000..ffdb49a828c
--- /dev/null
+++ b/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
@@ -0,0 +1,128 @@
+import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { GlSingleStat } from '@gitlab/ui/dist/charts';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import { METRIC_TYPE_SUMMARY } from '~/api/analytics_api';
+import ValueStreamMetrics from '~/cycle_analytics/components/value_stream_metrics.vue';
+import createFlash from '~/flash';
+import { group, metricsData } from './mock_data';
+
+jest.mock('~/flash');
+
+describe('ValueStreamMetrics', () => {
+ let wrapper;
+ let mockGetValueStreamSummaryMetrics;
+
+ const { full_path: requestPath } = group;
+ const fakeReqName = 'Mock metrics';
+ const metricsRequestFactory = () => ({
+ request: mockGetValueStreamSummaryMetrics,
+ endpoint: METRIC_TYPE_SUMMARY,
+ name: fakeReqName,
+ });
+
+ const createComponent = ({ requestParams = {} } = {}) => {
+ return shallowMount(ValueStreamMetrics, {
+ propsData: {
+ requestPath,
+ requestParams,
+ requests: [metricsRequestFactory()],
+ },
+ });
+ };
+
+ const findMetrics = () => wrapper.findAllComponents(GlSingleStat);
+
+ const expectToHaveRequest = (fields) => {
+ expect(mockGetValueStreamSummaryMetrics).toHaveBeenCalledWith({
+ endpoint: METRIC_TYPE_SUMMARY,
+ requestPath,
+ ...fields,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('with successful requests', () => {
+ beforeEach(() => {
+ mockGetValueStreamSummaryMetrics = jest.fn().mockResolvedValue({ data: metricsData });
+ wrapper = createComponent();
+ });
+
+ it('will display a loader with pending requests', async () => {
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(GlSkeletonLoading).exists()).toBe(true);
+ });
+
+ describe('with data loaded', () => {
+ beforeEach(async () => {
+ await waitForPromises();
+ });
+
+ it('fetches data from the value stream analytics endpoint', () => {
+ expectToHaveRequest({ params: {} });
+ });
+
+ it.each`
+ index | value | title | unit
+ ${0} | ${metricsData[0].value} | ${metricsData[0].title} | ${metricsData[0].unit}
+ ${1} | ${metricsData[1].value} | ${metricsData[1].title} | ${metricsData[1].unit}
+ ${2} | ${metricsData[2].value} | ${metricsData[2].title} | ${metricsData[2].unit}
+ ${3} | ${metricsData[3].value} | ${metricsData[3].title} | ${metricsData[3].unit}
+ `(
+ 'renders a single stat component for the $title with value and unit',
+ ({ index, value, title, unit }) => {
+ const metric = findMetrics().at(index);
+ expect(metric.props()).toMatchObject({ value, title, unit: unit ?? '' });
+ },
+ );
+
+ it('will not display a loading icon', () => {
+ expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);
+ });
+
+ describe('with additional params', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({
+ requestParams: {
+ 'project_ids[]': [1],
+ created_after: '2020-01-01',
+ created_before: '2020-02-01',
+ },
+ });
+
+ await waitForPromises();
+ });
+
+ it('fetches data for the `getValueStreamSummaryMetrics` request', () => {
+ expectToHaveRequest({
+ params: {
+ 'project_ids[]': [1],
+ created_after: '2020-01-01',
+ created_before: '2020-02-01',
+ },
+ });
+ });
+ });
+ });
+ });
+
+ describe('with a request failing', () => {
+ beforeEach(async () => {
+ mockGetValueStreamSummaryMetrics = jest.fn().mockRejectedValue();
+ wrapper = createComponent();
+
+ await waitForPromises();
+ });
+
+ it('should render an error message', () => {
+ expect(createFlash).toHaveBeenCalledWith({
+ message: `There was an error while fetching value stream analytics ${fakeReqName} data.`,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
index efadb9b717d..9335d800a16 100644
--- a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
@@ -17,6 +17,8 @@ const defaultMockDiscussion = {
notes,
};
+const DEFAULT_TODO_COUNT = 2;
+
describe('Design discussions component', () => {
let wrapper;
@@ -41,8 +43,14 @@ describe('Design discussions component', () => {
},
};
const mutate = jest.fn().mockResolvedValue({ data: { createNote: { errors: [] } } });
+ const readQuery = jest.fn().mockReturnValue({
+ project: {
+ issue: { designCollection: { designs: { nodes: [{ currentUserTodos: { nodes: [] } }] } } },
+ },
+ });
const $apollo = {
mutate,
+ provider: { clients: { defaultClient: { readQuery } } },
};
function createComponent(props = {}, data = {}) {
@@ -69,6 +77,12 @@ describe('Design discussions component', () => {
$apollo,
$route: {
hash: '#note_1',
+ params: {
+ id: 1,
+ },
+ query: {
+ version: null,
+ },
},
},
});
@@ -138,7 +152,13 @@ describe('Design discussions component', () => {
});
describe('when discussion is resolved', () => {
+ let dispatchEventSpy;
+
beforeEach(() => {
+ dispatchEventSpy = jest.spyOn(document, 'dispatchEvent');
+ jest.spyOn(document, 'querySelector').mockReturnValue({
+ innerText: DEFAULT_TODO_COUNT,
+ });
createComponent({
discussion: {
...defaultMockDiscussion,
@@ -174,6 +194,24 @@ describe('Design discussions component', () => {
expect(findResolveIcon().props('name')).toBe('check-circle-filled');
});
+ it('emits todo:toggle when discussion is resolved', async () => {
+ createComponent(
+ { discussionWithOpenForm: defaultMockDiscussion.id },
+ { discussionComment: 'test', isFormRendered: true },
+ );
+ findResolveButton().trigger('click');
+ findReplyForm().vm.$emit('submitForm');
+
+ await mutate();
+ await wrapper.vm.$nextTick();
+
+ const dispatchedEvent = dispatchEventSpy.mock.calls[0][0];
+
+ expect(dispatchEventSpy).toHaveBeenCalledTimes(1);
+ expect(dispatchedEvent.detail).toEqual({ count: DEFAULT_TODO_COUNT });
+ expect(dispatchedEvent.type).toBe('todo:toggle');
+ });
+
describe('when replies are expanded', () => {
beforeEach(() => {
findRepliesWidget().vm.$emit('toggle');
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
index 63afc3f000d..637f22457c4 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
@@ -17,13 +17,31 @@ exports[`Design management design version dropdown component renders design vers
iconname=""
iconrightarialabel=""
iconrightname=""
+ ischeckcentered="true"
ischecked="true"
ischeckitem="true"
secondarytext=""
>
- Version
- 2
- (latest)
+ <strong>
+ Version
+ 2
+ (latest)
+ </strong>
+
+ <div
+ class="gl-text-gray-600 gl-mt-1"
+ >
+ <div>
+ Adminstrator
+ </div>
+
+ <time-ago-stub
+ class="text-1"
+ cssclass=""
+ time="2021-08-09T06:05:00Z"
+ tooltipplacement="bottom"
+ />
+ </div>
</gl-dropdown-item-stub>
<gl-dropdown-item-stub
avatarurl=""
@@ -31,12 +49,30 @@ exports[`Design management design version dropdown component renders design vers
iconname=""
iconrightarialabel=""
iconrightname=""
+ ischeckcentered="true"
ischeckitem="true"
secondarytext=""
>
- Version
- 1
-
+ <strong>
+ Version
+ 1
+
+ </strong>
+
+ <div
+ class="gl-text-gray-600 gl-mt-1"
+ >
+ <div>
+ Adminstrator
+ </div>
+
+ <time-ago-stub
+ class="text-1"
+ cssclass=""
+ time="2021-08-09T06:05:00Z"
+ tooltipplacement="bottom"
+ />
+ </div>
</gl-dropdown-item-stub>
</gl-dropdown-stub>
`;
@@ -58,13 +94,31 @@ exports[`Design management design version dropdown component renders design vers
iconname=""
iconrightarialabel=""
iconrightname=""
+ ischeckcentered="true"
ischecked="true"
ischeckitem="true"
secondarytext=""
>
- Version
- 2
- (latest)
+ <strong>
+ Version
+ 2
+ (latest)
+ </strong>
+
+ <div
+ class="gl-text-gray-600 gl-mt-1"
+ >
+ <div>
+ Adminstrator
+ </div>
+
+ <time-ago-stub
+ class="text-1"
+ cssclass=""
+ time="2021-08-09T06:05:00Z"
+ tooltipplacement="bottom"
+ />
+ </div>
</gl-dropdown-item-stub>
<gl-dropdown-item-stub
avatarurl=""
@@ -72,12 +126,30 @@ exports[`Design management design version dropdown component renders design vers
iconname=""
iconrightarialabel=""
iconrightname=""
+ ischeckcentered="true"
ischeckitem="true"
secondarytext=""
>
- Version
- 1
-
+ <strong>
+ Version
+ 1
+
+ </strong>
+
+ <div
+ class="gl-text-gray-600 gl-mt-1"
+ >
+ <div>
+ Adminstrator
+ </div>
+
+ <time-ago-stub
+ class="text-1"
+ cssclass=""
+ time="2021-08-09T06:05:00Z"
+ tooltipplacement="bottom"
+ />
+ </div>
</gl-dropdown-item-stub>
</gl-dropdown-stub>
`;
diff --git a/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js b/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
index 1b01a363688..ebfe27eaa71 100644
--- a/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
+++ b/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
@@ -1,9 +1,10 @@
import { GlDropdown, GlDropdownItem, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import DesignVersionDropdown from '~/design_management/components/upload/design_version_dropdown.vue';
+import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import mockAllVersions from './mock_data/all_versions';
-const LATEST_VERSION_ID = 3;
+const LATEST_VERSION_ID = 1;
const PREVIOUS_VERSION_ID = 2;
const designRouteFactory = (versionId) => ({
@@ -110,5 +111,13 @@ describe('Design management design version dropdown component', () => {
expect(wrapper.findAll(GlDropdownItem)).toHaveLength(wrapper.vm.allVersions.length);
});
});
+
+ it('should render TimeAgo', async () => {
+ createComponent();
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.findAllComponents(TimeAgo)).toHaveLength(wrapper.vm.allVersions.length);
+ });
});
});
diff --git a/spec/frontend/design_management/components/upload/mock_data/all_versions.js b/spec/frontend/design_management/components/upload/mock_data/all_versions.js
index 237e1654f9b..24c59ce1a75 100644
--- a/spec/frontend/design_management/components/upload/mock_data/all_versions.js
+++ b/spec/frontend/design_management/components/upload/mock_data/all_versions.js
@@ -1,10 +1,20 @@
export default [
{
- id: 'gid://gitlab/DesignManagement::Version/3',
- sha: '0945756378e0b1588b9dd40d5a6b99e8b7198f55',
+ id: 'gid://gitlab/DesignManagement::Version/1',
+ sha: 'b389071a06c153509e11da1f582005b316667001',
+ createdAt: '2021-08-09T06:05:00Z',
+ author: {
+ id: 'gid://gitlab/User/1',
+ name: 'Adminstrator',
+ },
},
{
id: 'gid://gitlab/DesignManagement::Version/2',
- sha: '5b063fef0cd7213b312db65b30e24f057df21b20',
+ sha: 'b389071a06c153509e11da1f582005b316667021',
+ createdAt: '2021-08-09T06:05:00Z',
+ author: {
+ id: 'gid://gitlab/User/1',
+ name: 'Adminstrator',
+ },
},
];
diff --git a/spec/frontend/design_management/mock_data/all_versions.js b/spec/frontend/design_management/mock_data/all_versions.js
index 2b216574e27..f4026da7dfd 100644
--- a/spec/frontend/design_management/mock_data/all_versions.js
+++ b/spec/frontend/design_management/mock_data/all_versions.js
@@ -2,5 +2,19 @@ export default [
{
id: 'gid://gitlab/DesignManagement::Version/1',
sha: 'b389071a06c153509e11da1f582005b316667001',
+ createdAt: '2021-08-09T06:05:00Z',
+ author: {
+ id: 'gid://gitlab/User/1',
+ name: 'Adminstrator',
+ },
+ },
+ {
+ id: 'gid://gitlab/DesignManagement::Version/1',
+ sha: 'b389071a06c153509e11da1f582005b316667021',
+ createdAt: '2021-08-09T06:05:00Z',
+ author: {
+ id: 'gid://gitlab/User/1',
+ name: 'Adminstrator',
+ },
},
];
diff --git a/spec/frontend/design_management/mock_data/apollo_mock.js b/spec/frontend/design_management/mock_data/apollo_mock.js
index e53ad2e6afe..cdd07a16e90 100644
--- a/spec/frontend/design_management/mock_data/apollo_mock.js
+++ b/spec/frontend/design_management/mock_data/apollo_mock.js
@@ -172,3 +172,40 @@ export const moveDesignMutationResponseWithErrors = {
},
},
};
+
+export const resolveCommentMutationResponse = {
+ discussionToggleResolve: {
+ discussion: {
+ noteable: {
+ id: 'gid://gitlab/DesignManagement::Design/1',
+ currentUserTodos: {
+ nodes: [],
+ __typename: 'TodoConnection',
+ },
+ __typename: 'Design',
+ },
+ __typename: 'Discussion',
+ },
+ errors: [],
+ __typename: 'DiscussionToggleResolvePayload',
+ },
+};
+
+export const getDesignQueryResponse = {
+ project: {
+ issue: {
+ designCollection: {
+ designs: {
+ nodes: [
+ {
+ id: 'gid://gitlab/DesignManagement::Design/1',
+ currentUserTodos: {
+ nodes: [{ id: 'gid://gitlab/Todo::1' }],
+ },
+ },
+ ],
+ },
+ },
+ },
+ },
+};
diff --git a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
index 03ae77d4977..57023c55878 100644
--- a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
@@ -61,6 +61,7 @@ exports[`Design management design index page renders design index 1`] = `
<participants-stub
class="gl-mb-4"
+ lazy="true"
numberoflessparticipants="7"
participants="[object Object]"
/>
@@ -221,6 +222,7 @@ exports[`Design management design index page with error GlAlert is rendered in c
<participants-stub
class="gl-mb-4"
+ lazy="true"
numberoflessparticipants="7"
participants="[object Object]"
/>
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index b5eb3e1713c..1464dd84666 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -4,6 +4,7 @@ import MockAdapter from 'axios-mock-adapter';
import Mousetrap from 'mousetrap';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'spec/test_constants';
import App from '~/diffs/components/app.vue';
import CommitWidget from '~/diffs/components/commit_widget.vue';
@@ -16,7 +17,6 @@ import TreeList from '~/diffs/components/tree_list.vue';
/* You know what: sometimes alphabetical isn't the best order */
import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
-import MergeConflictWarning from '~/diffs/components/merge_conflict_warning.vue';
/* eslint-enable import/order */
import axios from '~/lib/utils/axios_utils';
@@ -258,6 +258,8 @@ describe('diffs/components/app', () => {
});
it('marks current diff file based on currently highlighted row', async () => {
+ window.location.hash = 'ABC_123';
+
createComponent({
shouldShow: true,
});
@@ -428,12 +430,9 @@ describe('diffs/components/app', () => {
jest.spyOn(wrapper.vm, 'refetchDiffData').mockImplementation(() => {});
jest.spyOn(wrapper.vm, 'adjustView').mockImplementation(() => {});
};
- let location;
- beforeAll(() => {
- location = window.location;
- delete window.location;
- window.location = COMMIT_URL;
+ beforeEach(() => {
+ setWindowLocation(COMMIT_URL);
document.title = 'My Title';
});
@@ -441,10 +440,6 @@ describe('diffs/components/app', () => {
jest.spyOn(urlUtils, 'updateHistory');
});
- afterAll(() => {
- window.location = location;
- });
-
it('when the commit changes and the app is not loading it should update the history, refetch the diff data, and update the view', async () => {
createComponent({}, ({ state }) => {
state.diffs.commit = { ...state.diffs.commit, id: 'OLD' };
@@ -546,43 +541,6 @@ describe('diffs/components/app', () => {
expect(getCollapsedFilesWarning(wrapper).exists()).toBe(false);
});
});
-
- describe('merge conflicts', () => {
- it('should render the merge conflicts banner if viewing the whole changeset and there are conflicts', () => {
- createComponent({}, ({ state }) => {
- Object.assign(state.diffs, {
- latestDiff: true,
- startVersion: null,
- hasConflicts: true,
- canMerge: false,
- conflictResolutionPath: 'path',
- });
- });
-
- expect(wrapper.find(MergeConflictWarning).exists()).toBe(true);
- });
-
- it.each`
- prop | value
- ${'latestDiff'} | ${false}
- ${'startVersion'} | ${'notnull'}
- ${'hasConflicts'} | ${false}
- `(
- "should not render if any of the MR properties aren't correct - like $prop: $value",
- ({ prop, value }) => {
- createComponent({}, ({ state }) => {
- Object.assign(state.diffs, {
- latestDiff: true,
- startVersion: null,
- hasConflicts: true,
- [prop]: value,
- });
- });
-
- expect(wrapper.find(MergeConflictWarning).exists()).toBe(false);
- },
- );
- });
});
it('should display commit widget if store has a commit', () => {
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index 80a51ee137a..1c0cb1193fa 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -1,5 +1,6 @@
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import { trimText } from 'helpers/text_helper';
import CompareVersionsComponent from '~/diffs/components/compare_versions.vue';
@@ -13,6 +14,10 @@ localVue.use(Vuex);
const NEXT_COMMIT_URL = `${TEST_HOST}/?commit_id=next`;
const PREV_COMMIT_URL = `${TEST_HOST}/?commit_id=prev`;
+beforeEach(() => {
+ setWindowLocation(TEST_HOST);
+});
+
describe('CompareVersions', () => {
let wrapper;
let store;
@@ -215,15 +220,7 @@ describe('CompareVersions', () => {
describe('prev commit', () => {
beforeAll(() => {
- global.jsdom.reconfigure({
- url: `${TEST_HOST}?commit_id=${mrCommit.id}`,
- });
- });
-
- afterAll(() => {
- global.jsdom.reconfigure({
- url: TEST_HOST,
- });
+ setWindowLocation(`?commit_id=${mrCommit.id}`);
});
beforeEach(() => {
@@ -258,15 +255,7 @@ describe('CompareVersions', () => {
describe('next commit', () => {
beforeAll(() => {
- global.jsdom.reconfigure({
- url: `${TEST_HOST}?commit_id=${mrCommit.id}`,
- });
- });
-
- afterAll(() => {
- global.jsdom.reconfigure({
- url: TEST_HOST,
- });
+ setWindowLocation(`?commit_id=${mrCommit.id}`);
});
beforeEach(() => {
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 99dda8d5deb..3dec56f2fe3 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -521,4 +521,54 @@ describe('DiffFile', () => {
expect(button.attributes('href')).toBe('/file/view/path');
});
});
+
+ it('loads collapsed file on mounted when single file mode is enabled', async () => {
+ wrapper.destroy();
+
+ const file = {
+ ...getReadableFile(),
+ load_collapsed_diff_url: '/diff_for_path',
+ highlighted_diff_lines: [],
+ parallel_diff_lines: [],
+ viewer: { name: 'collapsed', automaticallyCollapsed: true },
+ };
+
+ axiosMock.onGet(file.load_collapsed_diff_url).reply(httpStatus.OK, getReadableFile());
+
+ ({ wrapper, store } = createComponent({ file, props: { viewDiffsFileByFile: true } }));
+
+ await wrapper.vm.$nextTick();
+
+ expect(findLoader(wrapper).exists()).toBe(true);
+ });
+
+ describe('merge conflicts', () => {
+ beforeEach(() => {
+ wrapper.destroy();
+ });
+
+ it('does not render conflict alert', () => {
+ const file = {
+ ...getReadableFile(),
+ conflict_type: null,
+ renderIt: true,
+ };
+
+ ({ wrapper, store } = createComponent({ file }));
+
+ expect(wrapper.find('[data-testid="conflictsAlert"]').exists()).toBe(false);
+ });
+
+ it('renders conflict alert when conflict_type is present', () => {
+ const file = {
+ ...getReadableFile(),
+ conflict_type: 'both_modified',
+ renderIt: true,
+ };
+
+ ({ wrapper, store } = createComponent({ file }));
+
+ expect(wrapper.find('[data-testid="conflictsAlert"]').exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/settings_dropdown_spec.js b/spec/frontend/diffs/components/settings_dropdown_spec.js
index 43b9c5871a6..2dd35519464 100644
--- a/spec/frontend/diffs/components/settings_dropdown_spec.js
+++ b/spec/frontend/diffs/components/settings_dropdown_spec.js
@@ -48,13 +48,17 @@ describe('Diff settings dropdown component', () => {
it('list view button dispatches setRenderTreeList with false', () => {
wrapper.find('.js-list-view').trigger('click');
- expect(store.dispatch).toHaveBeenCalledWith('diffs/setRenderTreeList', false);
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/setRenderTreeList', {
+ renderTreeList: false,
+ });
});
it('tree view button dispatches setRenderTreeList with true', () => {
wrapper.find('.js-tree-view').trigger('click');
- expect(store.dispatch).toHaveBeenCalledWith('diffs/setRenderTreeList', true);
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/setRenderTreeList', {
+ renderTreeList: true,
+ });
});
it('sets list button as selected when renderTreeList is false', () => {
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index c2e5d07bcfd..6d005b868a9 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -874,6 +874,7 @@ describe('DiffsStoreActions', () => {
describe('scrollToFile', () => {
let commit;
+ const getters = { isVirtualScrollingEnabled: false };
beforeEach(() => {
commit = jest.fn();
@@ -888,7 +889,7 @@ describe('DiffsStoreActions', () => {
},
};
- scrollToFile({ state, commit }, 'path');
+ scrollToFile({ state, commit, getters }, 'path');
expect(document.location.hash).toBe('#test');
});
@@ -902,7 +903,7 @@ describe('DiffsStoreActions', () => {
},
};
- scrollToFile({ state, commit }, 'path');
+ scrollToFile({ state, commit, getters }, 'path');
expect(commit).toHaveBeenCalledWith(types.VIEW_DIFF_FILE, 'test');
});
@@ -1000,7 +1001,7 @@ describe('DiffsStoreActions', () => {
it('commits SET_RENDER_TREE_LIST', (done) => {
testAction(
setRenderTreeList,
- true,
+ { renderTreeList: true },
{},
[{ type: types.SET_RENDER_TREE_LIST, payload: true }],
[],
@@ -1009,7 +1010,7 @@ describe('DiffsStoreActions', () => {
});
it('sets localStorage', () => {
- setRenderTreeList({ commit() {} }, true);
+ setRenderTreeList({ commit() {} }, { renderTreeList: true });
expect(localStorage.setItem).toHaveBeenCalledWith('mr_diff_tree_list', true);
});
diff --git a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
index 99f13a1c84c..6ea8f691c3c 100644
--- a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
+++ b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
@@ -1,3 +1,4 @@
+import setWindowLocation from 'helpers/set_window_location_helper';
import {
DIFF_COMPARE_BASE_VERSION_INDEX,
DIFF_COMPARE_HEAD_VERSION_INDEX,
@@ -47,15 +48,12 @@ describe('Compare diff version dropdowns', () => {
let expectedFirstVersion;
let expectedBaseVersion;
let expectedHeadVersion;
- const originalLocation = window.location;
+ const originalLocation = window.location.href;
const setupTest = (includeDiffHeadParam) => {
const diffHeadParam = includeDiffHeadParam ? '?diff_head=true' : '';
- Object.defineProperty(window, 'location', {
- writable: true,
- value: { search: diffHeadParam },
- });
+ setWindowLocation(diffHeadParam);
expectedFirstVersion = {
...diffsMockData[1],
@@ -91,7 +89,7 @@ describe('Compare diff version dropdowns', () => {
};
afterEach(() => {
- window.location = originalLocation;
+ setWindowLocation(originalLocation);
});
it('base version selected', () => {
diff --git a/spec/frontend/diffs/utils/queue_events_spec.js b/spec/frontend/diffs/utils/queue_events_spec.js
new file mode 100644
index 00000000000..007748d8b2c
--- /dev/null
+++ b/spec/frontend/diffs/utils/queue_events_spec.js
@@ -0,0 +1,36 @@
+import api from '~/api';
+import { DEFER_DURATION } from '~/diffs/constants';
+import { queueRedisHllEvents } from '~/diffs/utils/queue_events';
+
+jest.mock('~/api', () => ({
+ trackRedisHllUserEvent: jest.fn(),
+}));
+
+describe('diffs events queue', () => {
+ describe('queueRedisHllEvents', () => {
+ it('does not dispatch the event immediately', () => {
+ queueRedisHllEvents(['know_event']);
+ expect(api.trackRedisHllUserEvent).not.toHaveBeenCalled();
+ });
+
+ it('does dispatch the event after the defer duration', () => {
+ queueRedisHllEvents(['know_event']);
+ jest.advanceTimersByTime(DEFER_DURATION + 1);
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalled();
+ });
+
+ it('increases the defer duration based on the provided events count', () => {
+ let deferDuration = DEFER_DURATION + 1;
+ const events = ['know_event_a', 'know_event_b', 'know_event_c'];
+ queueRedisHllEvents(events);
+
+ expect(api.trackRedisHllUserEvent).not.toHaveBeenCalled();
+
+ events.forEach((event, index) => {
+ jest.advanceTimersByTime(deferDuration);
+ expect(api.trackRedisHllUserEvent).toHaveBeenLastCalledWith(event);
+ deferDuration *= index + 1;
+ });
+ });
+ });
+});
diff --git a/spec/frontend/editor/source_editor_extension_base_spec.js b/spec/frontend/editor/source_editor_extension_base_spec.js
index 352db9d0d51..2c06ae03892 100644
--- a/spec/frontend/editor/source_editor_extension_base_spec.js
+++ b/spec/frontend/editor/source_editor_extension_base_spec.js
@@ -1,5 +1,6 @@
import { Range } from 'monaco-editor';
import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
+import setWindowLocation from 'helpers/set_window_location_helper';
import {
ERROR_INSTANCE_REQUIRED_FOR_EXTENSION,
EDITOR_TYPE_CODE,
@@ -152,12 +153,7 @@ describe('The basis for an Source Editor extension', () => {
useFakeRequestAnimationFrame();
beforeEach(() => {
- delete window.location;
- window.location = new URL(`https://localhost`);
- });
-
- afterEach(() => {
- window.location.hash = '';
+ setWindowLocation('https://localhost');
});
it.each`
diff --git a/spec/frontend/editor/source_editor_markdown_ext_spec.js b/spec/frontend/editor/source_editor_markdown_ext_spec.js
index 943e21250b4..48ccc10e486 100644
--- a/spec/frontend/editor/source_editor_markdown_ext_spec.js
+++ b/spec/frontend/editor/source_editor_markdown_ext_spec.js
@@ -1,16 +1,36 @@
-import { Range, Position } from 'monaco-editor';
+import MockAdapter from 'axios-mock-adapter';
+import { Range, Position, editor as monacoEditor } from 'monaco-editor';
+import waitForPromises from 'helpers/wait_for_promises';
+import {
+ EXTENSION_MARKDOWN_PREVIEW_PANEL_CLASS,
+ EXTENSION_MARKDOWN_PREVIEW_ACTION_ID,
+ EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH,
+ EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS,
+ EXTENSION_MARKDOWN_PREVIEW_UPDATE_DELAY,
+} from '~/editor/constants';
import { EditorMarkdownExtension } from '~/editor/extensions/source_editor_markdown_ext';
import SourceEditor from '~/editor/source_editor';
+import createFlash from '~/flash';
+import axios from '~/lib/utils/axios_utils';
+import syntaxHighlight from '~/syntax_highlight';
+
+jest.mock('~/syntax_highlight');
+jest.mock('~/flash');
describe('Markdown Extension for Source Editor', () => {
let editor;
let instance;
let editorEl;
+ let panelSpy;
+ let mockAxios;
+ const projectPath = 'fooGroup/barProj';
const firstLine = 'This is a';
const secondLine = 'multiline';
const thirdLine = 'string with some **markup**';
const text = `${firstLine}\n${secondLine}\n${thirdLine}`;
- const filePath = 'foo.md';
+ const plaintextPath = 'foo.txt';
+ const markdownPath = 'foo.md';
+ const responseData = '<div>FooBar</div>';
const setSelection = (startLineNumber = 1, startColumn = 1, endLineNumber = 1, endColumn = 1) => {
const selection = new Range(startLineNumber, startColumn, endLineNumber, endColumn);
@@ -22,21 +42,378 @@ describe('Markdown Extension for Source Editor', () => {
const selectionToString = () => instance.getSelection().toString();
const positionToString = () => instance.getPosition().toString();
+ const togglePreview = async () => {
+ instance.togglePreview();
+ await waitForPromises();
+ };
+
beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
setFixtures('<div id="editor" data-editor-loading></div>');
editorEl = document.getElementById('editor');
editor = new SourceEditor();
instance = editor.createInstance({
el: editorEl,
- blobPath: filePath,
+ blobPath: markdownPath,
blobContent: text,
});
- editor.use(new EditorMarkdownExtension());
+ editor.use(new EditorMarkdownExtension({ instance, projectPath }));
+ panelSpy = jest.spyOn(EditorMarkdownExtension, 'togglePreviewPanel');
});
afterEach(() => {
instance.dispose();
editorEl.remove();
+ mockAxios.restore();
+ });
+
+ it('sets up the instance', () => {
+ expect(instance.preview).toEqual({
+ el: undefined,
+ action: expect.any(Object),
+ shown: false,
+ modelChangeListener: undefined,
+ });
+ expect(instance.projectPath).toBe(projectPath);
+ });
+
+ describe('model language changes listener', () => {
+ let cleanupSpy;
+ let actionSpy;
+
+ beforeEach(async () => {
+ cleanupSpy = jest.spyOn(instance, 'cleanup');
+ actionSpy = jest.spyOn(instance, 'setupPreviewAction');
+ await togglePreview();
+ });
+
+ it('cleans up when switching away from markdown', () => {
+ expect(instance.cleanup).not.toHaveBeenCalled();
+ expect(instance.setupPreviewAction).not.toHaveBeenCalled();
+
+ instance.updateModelLanguage(plaintextPath);
+
+ expect(cleanupSpy).toHaveBeenCalled();
+ expect(actionSpy).not.toHaveBeenCalled();
+ });
+
+ it.each`
+ oldLanguage | newLanguage | setupCalledTimes
+ ${'plaintext'} | ${'markdown'} | ${1}
+ ${'markdown'} | ${'markdown'} | ${0}
+ ${'markdown'} | ${'plaintext'} | ${0}
+ ${'markdown'} | ${undefined} | ${0}
+ ${undefined} | ${'markdown'} | ${1}
+ `(
+ 'correctly handles re-enabling of the action when switching from $oldLanguage to $newLanguage',
+ ({ oldLanguage, newLanguage, setupCalledTimes } = {}) => {
+ expect(actionSpy).not.toHaveBeenCalled();
+ instance.updateModelLanguage(oldLanguage);
+ instance.updateModelLanguage(newLanguage);
+ expect(actionSpy).toHaveBeenCalledTimes(setupCalledTimes);
+ },
+ );
+ });
+
+ describe('model change listener', () => {
+ let cleanupSpy;
+ let actionSpy;
+
+ beforeEach(() => {
+ cleanupSpy = jest.spyOn(instance, 'cleanup');
+ actionSpy = jest.spyOn(instance, 'setupPreviewAction');
+ instance.togglePreview();
+ });
+
+ afterEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('does not do anything if there is no model', () => {
+ instance.setModel(null);
+
+ expect(cleanupSpy).not.toHaveBeenCalled();
+ expect(actionSpy).not.toHaveBeenCalled();
+ });
+
+ it('cleans up the preview when the model changes', () => {
+ instance.setModel(monacoEditor.createModel('foo'));
+ expect(cleanupSpy).toHaveBeenCalled();
+ });
+
+ it.each`
+ language | setupCalledTimes
+ ${'markdown'} | ${1}
+ ${'plaintext'} | ${0}
+ ${undefined} | ${0}
+ `(
+ 'correctly handles actions when the new model is $language',
+ ({ language, setupCalledTimes } = {}) => {
+ instance.setModel(monacoEditor.createModel('foo', language));
+
+ expect(actionSpy).toHaveBeenCalledTimes(setupCalledTimes);
+ },
+ );
+ });
+
+ describe('cleanup', () => {
+ beforeEach(async () => {
+ mockAxios.onPost().reply(200, { body: responseData });
+ await togglePreview();
+ });
+
+ it('disposes the modelChange listener and does not fetch preview on content changes', () => {
+ expect(instance.preview.modelChangeListener).toBeDefined();
+ jest.spyOn(instance, 'fetchPreview');
+
+ instance.cleanup();
+ instance.setValue('Foo Bar');
+ jest.advanceTimersByTime(EXTENSION_MARKDOWN_PREVIEW_UPDATE_DELAY);
+
+ expect(instance.fetchPreview).not.toHaveBeenCalled();
+ });
+
+ it('removes the contextual menu action', () => {
+ expect(instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID)).toBeDefined();
+
+ instance.cleanup();
+
+ expect(instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID)).toBe(null);
+ });
+
+ it('toggles the `shown` flag', () => {
+ expect(instance.preview.shown).toBe(true);
+ instance.cleanup();
+ expect(instance.preview.shown).toBe(false);
+ });
+
+ it('toggles the panel only if the preview is visible', () => {
+ const { el: previewEl } = instance.preview;
+ const parentEl = previewEl.parentElement;
+
+ expect(previewEl).toBeVisible();
+ expect(parentEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe(true);
+
+ instance.cleanup();
+ expect(previewEl).toBeHidden();
+ expect(parentEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe(
+ false,
+ );
+
+ instance.cleanup();
+ expect(previewEl).toBeHidden();
+ expect(parentEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe(
+ false,
+ );
+ });
+
+ it('toggles the layout only if the preview is visible', () => {
+ const { width } = instance.getLayoutInfo();
+
+ expect(instance.preview.shown).toBe(true);
+
+ instance.cleanup();
+
+ const { width: newWidth } = instance.getLayoutInfo();
+ expect(newWidth === width / EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH).toBe(true);
+
+ instance.cleanup();
+ expect(newWidth === width / EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH).toBe(true);
+ });
+ });
+
+ describe('fetchPreview', () => {
+ const group = 'foo';
+ const project = 'bar';
+ const setData = (path, g, p) => {
+ instance.projectPath = path;
+ document.body.setAttribute('data-group', g);
+ document.body.setAttribute('data-project', p);
+ };
+ const fetchPreview = async () => {
+ instance.fetchPreview();
+ await waitForPromises();
+ };
+
+ beforeEach(() => {
+ mockAxios.onPost().reply(200, { body: responseData });
+ });
+
+ it('correctly fetches preview based on projectPath', async () => {
+ setData(projectPath, group, project);
+ await fetchPreview();
+ expect(mockAxios.history.post[0].url).toBe(`/${projectPath}/preview_markdown`);
+ expect(mockAxios.history.post[0].data).toEqual(JSON.stringify({ text }));
+ });
+
+ it('correctly fetches preview based on group and project data attributes', async () => {
+ setData(undefined, group, project);
+ await fetchPreview();
+ expect(mockAxios.history.post[0].url).toBe(`/${group}/${project}/preview_markdown`);
+ expect(mockAxios.history.post[0].data).toEqual(JSON.stringify({ text }));
+ });
+
+ it('puts the fetched content into the preview DOM element', async () => {
+ instance.preview.el = editorEl.parentElement;
+ await fetchPreview();
+ expect(instance.preview.el.innerHTML).toEqual(responseData);
+ });
+
+ it('applies syntax highlighting to the preview content', async () => {
+ instance.preview.el = editorEl.parentElement;
+ await fetchPreview();
+ expect(syntaxHighlight).toHaveBeenCalled();
+ });
+
+ it('catches the errors when fetching the preview', async () => {
+ mockAxios.onPost().reply(500);
+
+ await fetchPreview();
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+
+ describe('setupPreviewAction', () => {
+ it('adds the contextual menu action', () => {
+ expect(instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID)).toBeDefined();
+ });
+
+ it('does not set up action if one already exists', () => {
+ jest.spyOn(instance, 'addAction').mockImplementation();
+
+ instance.setupPreviewAction();
+ expect(instance.addAction).not.toHaveBeenCalled();
+ });
+
+ it('toggles preview when the action is triggered', () => {
+ jest.spyOn(instance, 'togglePreview').mockImplementation();
+
+ expect(instance.togglePreview).not.toHaveBeenCalled();
+
+ const action = instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID);
+ action.run();
+
+ expect(instance.togglePreview).toHaveBeenCalled();
+ });
+ });
+
+ describe('togglePreview', () => {
+ beforeEach(() => {
+ mockAxios.onPost().reply(200, { body: responseData });
+ });
+
+ it('toggles preview flag on instance', () => {
+ expect(instance.preview.shown).toBe(false);
+
+ instance.togglePreview();
+ expect(instance.preview.shown).toBe(true);
+
+ instance.togglePreview();
+ expect(instance.preview.shown).toBe(false);
+ });
+
+ describe('panel DOM element set up', () => {
+ it('sets up an element to contain the preview and stores it on instance', () => {
+ expect(instance.preview.el).toBeUndefined();
+
+ instance.togglePreview();
+
+ expect(instance.preview.el).toBeDefined();
+ expect(instance.preview.el.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_CLASS)).toBe(
+ true,
+ );
+ });
+
+ it('re-uses existing preview DOM element on repeated calls', () => {
+ instance.togglePreview();
+ const origPreviewEl = instance.preview.el;
+ instance.togglePreview();
+
+ expect(instance.preview.el).toBe(origPreviewEl);
+ });
+
+ it('hides the preview DOM element by default', () => {
+ panelSpy.mockImplementation();
+ instance.togglePreview();
+ expect(instance.preview.el.style.display).toBe('none');
+ });
+ });
+
+ describe('preview layout setup', () => {
+ it('sets correct preview layout', () => {
+ jest.spyOn(instance, 'layout');
+ const { width, height } = instance.getLayoutInfo();
+
+ instance.togglePreview();
+
+ expect(instance.layout).toHaveBeenCalledWith({
+ width: width * EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH,
+ height,
+ });
+ });
+ });
+
+ describe('preview panel', () => {
+ it('toggles preview CSS class on the editor', () => {
+ expect(editorEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe(
+ false,
+ );
+ instance.togglePreview();
+ expect(editorEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe(
+ true,
+ );
+ instance.togglePreview();
+ expect(editorEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe(
+ false,
+ );
+ });
+
+ it('toggles visibility of the preview DOM element', async () => {
+ await togglePreview();
+ expect(instance.preview.el.style.display).toBe('block');
+ await togglePreview();
+ expect(instance.preview.el.style.display).toBe('none');
+ });
+
+ describe('hidden preview DOM element', () => {
+ it('listens to model changes and re-fetches preview', async () => {
+ expect(mockAxios.history.post).toHaveLength(0);
+ await togglePreview();
+ expect(mockAxios.history.post).toHaveLength(1);
+
+ instance.setValue('New Value');
+ await waitForPromises();
+ expect(mockAxios.history.post).toHaveLength(2);
+ });
+
+ it('stores disposable listener for model changes', async () => {
+ expect(instance.preview.modelChangeListener).toBeUndefined();
+ await togglePreview();
+ expect(instance.preview.modelChangeListener).toBeDefined();
+ });
+ });
+
+ describe('already visible preview', () => {
+ beforeEach(async () => {
+ await togglePreview();
+ mockAxios.resetHistory();
+ });
+
+ it('does not re-fetch the preview', () => {
+ instance.togglePreview();
+ expect(mockAxios.history.post).toHaveLength(0);
+ });
+
+ it('disposes the model change event listener', () => {
+ const disposeSpy = jest.fn();
+ instance.preview.modelChangeListener = {
+ dispose: disposeSpy,
+ };
+ instance.togglePreview();
+ expect(disposeSpy).toHaveBeenCalled();
+ });
+ });
+ });
});
describe('getSelectedText', () => {
diff --git a/spec/frontend/editor/utils_spec.js b/spec/frontend/editor/utils_spec.js
new file mode 100644
index 00000000000..97d3e9e081d
--- /dev/null
+++ b/spec/frontend/editor/utils_spec.js
@@ -0,0 +1,85 @@
+import { editor as monacoEditor } from 'monaco-editor';
+import * as utils from '~/editor/utils';
+import { DEFAULT_THEME } from '~/ide/lib/themes';
+
+describe('Source Editor utils', () => {
+ let el;
+
+ const stubUserColorScheme = (value) => {
+ if (window.gon == null) {
+ window.gon = {};
+ }
+ window.gon.user_color_scheme = value;
+ };
+
+ describe('clearDomElement', () => {
+ beforeEach(() => {
+ setFixtures('<div id="foo"><div id="bar">Foo</div></div>');
+ el = document.getElementById('foo');
+ });
+
+ it('removes all child nodes from an element', () => {
+ expect(el.children.length).toBe(1);
+ utils.clearDomElement(el);
+ expect(el.children.length).toBe(0);
+ });
+ });
+
+ describe('setupEditorTheme', () => {
+ beforeEach(() => {
+ jest.spyOn(monacoEditor, 'defineTheme').mockImplementation();
+ jest.spyOn(monacoEditor, 'setTheme').mockImplementation();
+ });
+
+ it.each`
+ themeName | expectedThemeName
+ ${'solarized-light'} | ${'solarized-light'}
+ ${DEFAULT_THEME} | ${DEFAULT_THEME}
+ ${'non-existent'} | ${DEFAULT_THEME}
+ `(
+ 'sets the $expectedThemeName theme when $themeName is set in the user preference',
+ ({ themeName, expectedThemeName }) => {
+ stubUserColorScheme(themeName);
+ utils.setupEditorTheme();
+
+ expect(monacoEditor.setTheme).toHaveBeenCalledWith(expectedThemeName);
+ },
+ );
+ });
+
+ describe('getBlobLanguage', () => {
+ it.each`
+ path | expectedLanguage
+ ${'foo.js'} | ${'javascript'}
+ ${'foo.js.rb'} | ${'ruby'}
+ ${'foo.bar'} | ${'plaintext'}
+ ${undefined} | ${'plaintext'}
+ `(
+ 'sets the $expectedThemeName theme when $themeName is set in the user preference',
+ ({ path, expectedLanguage }) => {
+ const language = utils.getBlobLanguage(path);
+
+ expect(language).toEqual(expectedLanguage);
+ },
+ );
+ });
+
+ describe('setupCodeSnipet', () => {
+ beforeEach(() => {
+ jest.spyOn(monacoEditor, 'colorizeElement').mockImplementation();
+ jest.spyOn(monacoEditor, 'setTheme').mockImplementation();
+ setFixtures('<pre id="foo"></pre>');
+ el = document.getElementById('foo');
+ });
+
+ it('colorizes the element and applies the preference theme', () => {
+ expect(monacoEditor.colorizeElement).not.toHaveBeenCalled();
+ expect(monacoEditor.setTheme).not.toHaveBeenCalled();
+
+ utils.setupCodeSnippet(el);
+
+ expect(monacoEditor.colorizeElement).toHaveBeenCalledWith(el);
+ expect(monacoEditor.setTheme).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index 29aa416149c..cf47a1cd7bb 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -88,13 +88,32 @@ class CustomEnvironment extends JSDOMEnvironment {
}),
});
- this.global.PerformanceObserver = class {
+ /**
+ * JSDom doesn't have an own observer implementation, so this a Noop Observer.
+ * If you are testing functionality, related to observers, have a look at __helpers__/mock_dom_observer.js
+ *
+ * JSDom actually implements a _proper_ MutationObserver, so no need to mock it!
+ */
+ class NoopObserver {
/* eslint-disable no-useless-constructor, no-unused-vars, no-empty-function, class-methods-use-this */
constructor(callback) {}
disconnect() {}
observe(element, initObject) {}
+ unobserve(element) {}
+ takeRecords() {
+ return [];
+ }
/* eslint-enable no-useless-constructor, no-unused-vars, no-empty-function, class-methods-use-this */
- };
+ }
+
+ ['IntersectionObserver', 'PerformanceObserver', 'ResizeObserver'].forEach((observer) => {
+ if (this.global[observer]) {
+ throw new Error(
+ `We overwrite an existing Observer in jsdom (${observer}), are you sure you want to do that?`,
+ );
+ }
+ this.global[observer] = NoopObserver;
+ });
}
async teardown() {
diff --git a/spec/frontend/environments/confirm_rollback_modal_spec.js b/spec/frontend/environments/confirm_rollback_modal_spec.js
index 8fb53579f96..d62aaec4f69 100644
--- a/spec/frontend/environments/confirm_rollback_modal_spec.js
+++ b/spec/frontend/environments/confirm_rollback_modal_spec.js
@@ -1,70 +1,104 @@
-import { GlModal } from '@gitlab/ui';
+import { GlModal, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import ConfirmRollbackModal from '~/environments/components/confirm_rollback_modal.vue';
import eventHub from '~/environments/event_hub';
describe('Confirm Rollback Modal Component', () => {
let environment;
+ let component;
- beforeEach(() => {
- environment = {
- name: 'test',
- last_deployment: {
- commit: {
- short_id: 'abc0123',
- },
+ const envWithLastDeployment = {
+ name: 'test',
+ last_deployment: {
+ commit: {
+ short_id: 'abc0123',
},
- modalId: 'test',
- };
- });
+ },
+ modalId: 'test',
+ };
- it('should show "Rollback" when isLastDeployment is false', () => {
- const component = shallowMount(ConfirmRollbackModal, {
- propsData: {
- environment: {
- ...environment,
- isLastDeployment: false,
- },
- },
- });
- const modal = component.find(GlModal);
+ const envWithoutLastDeployment = {
+ name: 'test',
+ modalId: 'test',
+ commitShortSha: 'abc0123',
+ commitUrl: 'test/-/commit/abc0123',
+ };
- expect(modal.attributes('title')).toContain('Rollback');
- expect(modal.attributes('title')).toContain('test');
- expect(modal.attributes('ok-title')).toBe('Rollback');
- expect(modal.text()).toContain('commit abc0123');
- expect(modal.text()).toContain('Are you sure you want to continue?');
- });
+ const retryPath = 'test/-/jobs/123/retry';
- it('should show "Re-deploy" when isLastDeployment is true', () => {
- const component = shallowMount(ConfirmRollbackModal, {
+ const createComponent = (props = {}) => {
+ component = shallowMount(ConfirmRollbackModal, {
propsData: {
- environment: {
- ...environment,
- isLastDeployment: true,
- },
+ ...props,
+ },
+ stubs: {
+ GlSprintf,
},
});
- const modal = component.find(GlModal);
+ };
- expect(modal.attributes('title')).toContain('Re-deploy');
- expect(modal.attributes('title')).toContain('test');
- expect(modal.attributes('ok-title')).toBe('Re-deploy');
- expect(modal.text()).toContain('commit abc0123');
- expect(modal.text()).toContain('Are you sure you want to continue?');
- });
+ describe.each`
+ hasMultipleCommits | environmentData | retryUrl | primaryPropsAttrs
+ ${true} | ${envWithLastDeployment} | ${null} | ${[{ variant: 'danger' }]}
+ ${false} | ${envWithoutLastDeployment} | ${retryPath} | ${[{ variant: 'danger' }, { 'data-method': 'post' }, { href: retryPath }]}
+ `(
+ 'when hasMultipleCommits=$hasMultipleCommits',
+ ({ hasMultipleCommits, environmentData, retryUrl, primaryPropsAttrs }) => {
+ beforeEach(() => {
+ environment = environmentData;
+ });
- it('should emit the "rollback" event when "ok" is clicked', () => {
- environment = { ...environment, isLastDeployment: true };
- const component = shallowMount(ConfirmRollbackModal, {
- propsData: {
- environment,
- },
- });
- const eventHubSpy = jest.spyOn(eventHub, '$emit');
- const modal = component.find(GlModal);
- modal.vm.$emit('ok');
+ it('should show "Rollback" when isLastDeployment is false', () => {
+ createComponent({
+ environment: {
+ ...environment,
+ isLastDeployment: false,
+ },
+ hasMultipleCommits,
+ retryUrl,
+ });
+ const modal = component.find(GlModal);
+
+ expect(modal.attributes('title')).toContain('Rollback');
+ expect(modal.attributes('title')).toContain('test');
+ expect(modal.props('actionPrimary').text).toBe('Rollback');
+ expect(modal.props('actionPrimary').attributes).toEqual(primaryPropsAttrs);
+ expect(modal.text()).toContain('commit abc0123');
+ expect(modal.text()).toContain('Are you sure you want to continue?');
+ });
+
+ it('should show "Re-deploy" when isLastDeployment is true', () => {
+ createComponent({
+ environment: {
+ ...environment,
+ isLastDeployment: true,
+ },
+ hasMultipleCommits,
+ });
+
+ const modal = component.find(GlModal);
+
+ expect(modal.attributes('title')).toContain('Re-deploy');
+ expect(modal.attributes('title')).toContain('test');
+ expect(modal.props('actionPrimary').text).toBe('Re-deploy');
+ expect(modal.text()).toContain('commit abc0123');
+ expect(modal.text()).toContain('Are you sure you want to continue?');
+ });
+
+ it('should emit the "rollback" event when "ok" is clicked', () => {
+ const env = { ...environmentData, isLastDeployment: true };
+
+ createComponent({
+ environment: env,
+ hasMultipleCommits,
+ });
+
+ const eventHubSpy = jest.spyOn(eventHub, '$emit');
+ const modal = component.find(GlModal);
+ modal.vm.$emit('ok');
- expect(eventHubSpy).toHaveBeenCalledWith('rollbackEnvironment', environment);
- });
+ expect(eventHubSpy).toHaveBeenCalledWith('rollbackEnvironment', env);
+ });
+ },
+ );
});
diff --git a/spec/frontend/environments/edit_environment_spec.js b/spec/frontend/environments/edit_environment_spec.js
new file mode 100644
index 00000000000..3e7f5dd5ff4
--- /dev/null
+++ b/spec/frontend/environments/edit_environment_spec.js
@@ -0,0 +1,104 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import EditEnvironment from '~/environments/components/edit_environment.vue';
+import createFlash from '~/flash';
+import axios from '~/lib/utils/axios_utils';
+import { visitUrl } from '~/lib/utils/url_utility';
+
+jest.mock('~/lib/utils/url_utility');
+jest.mock('~/flash');
+
+const DEFAULT_OPTS = {
+ provide: {
+ projectEnvironmentsPath: '/projects/environments',
+ updateEnvironmentPath: '/proejcts/environments/1',
+ },
+ propsData: { environment: { name: 'foo', externalUrl: 'https://foo.example.com' } },
+};
+
+describe('~/environments/components/edit.vue', () => {
+ let wrapper;
+ let mock;
+ let name;
+ let url;
+ let form;
+
+ const createWrapper = (opts = {}) =>
+ mountExtended(EditEnvironment, {
+ ...DEFAULT_OPTS,
+ ...opts,
+ });
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ wrapper = createWrapper();
+ name = wrapper.findByLabelText('Name');
+ url = wrapper.findByLabelText('External URL');
+ form = wrapper.findByRole('form', { name: 'Edit environment' });
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ const showsLoading = () => wrapper.find(GlLoadingIcon).exists();
+
+ const submitForm = async (expected, response) => {
+ mock
+ .onPut(DEFAULT_OPTS.provide.updateEnvironmentPath, {
+ name: expected.name,
+ external_url: expected.url,
+ })
+ .reply(...response);
+ await name.setValue(expected.name);
+ await url.setValue(expected.url);
+
+ await form.trigger('submit');
+ await waitForPromises();
+ };
+
+ it('sets the title to Edit environment', () => {
+ const header = wrapper.findByRole('heading', { name: 'Edit environment' });
+ expect(header.exists()).toBe(true);
+ });
+
+ it.each`
+ input | value
+ ${() => name} | ${'test'}
+ ${() => url} | ${'https://example.org'}
+ `('it changes the value of the input to $value', async ({ input, value }) => {
+ await input().setValue(value);
+
+ expect(input().element.value).toBe(value);
+ });
+
+ it('shows loader after form is submitted', async () => {
+ const expected = { name: 'test', url: 'https://google.ca' };
+
+ expect(showsLoading()).toBe(false);
+
+ await submitForm(expected, [200, { path: '/test' }]);
+
+ expect(showsLoading()).toBe(true);
+ });
+
+ it('submits the updated environment on submit', async () => {
+ const expected = { name: 'test', url: 'https://google.ca' };
+
+ await submitForm(expected, [200, { path: '/test' }]);
+
+ expect(visitUrl).toHaveBeenCalledWith('/test');
+ });
+
+ it('shows errors on error', async () => {
+ const expected = { name: 'test', url: 'https://google.ca' };
+
+ await submitForm(expected, [400, { message: ['name taken'] }]);
+
+ expect(createFlash).toHaveBeenCalledWith({ message: 'name taken' });
+ expect(showsLoading()).toBe(false);
+ });
+});
diff --git a/spec/frontend/environments/environment_form_spec.js b/spec/frontend/environments/environment_form_spec.js
new file mode 100644
index 00000000000..ed8fda71dab
--- /dev/null
+++ b/spec/frontend/environments/environment_form_spec.js
@@ -0,0 +1,105 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import EnvironmentForm from '~/environments/components/environment_form.vue';
+
+jest.mock('~/lib/utils/csrf');
+
+const DEFAULT_PROPS = {
+ environment: { name: '', externalUrl: '' },
+ title: 'environment',
+ cancelPath: '/cancel',
+};
+
+describe('~/environments/components/form.vue', () => {
+ let wrapper;
+
+ const createWrapper = (propsData = {}) =>
+ mountExtended(EnvironmentForm, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...propsData,
+ },
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+
+ it('links to documentation regarding environments', () => {
+ const link = wrapper.findByRole('link', { name: 'More information' });
+ expect(link.attributes('href')).toBe('/help/ci/environments/index.md');
+ });
+
+ it('links the cancel button to the cancel path', () => {
+ const cancel = wrapper.findByRole('link', { name: 'Cancel' });
+
+ expect(cancel.attributes('href')).toBe(DEFAULT_PROPS.cancelPath);
+ });
+
+ describe('name input', () => {
+ let name;
+
+ beforeEach(() => {
+ name = wrapper.findByLabelText('Name');
+ });
+
+ it('should emit changes to the name', async () => {
+ await name.setValue('test');
+ await name.trigger('blur');
+
+ expect(wrapper.emitted('change')).toEqual([[{ name: 'test', externalUrl: '' }]]);
+ });
+
+ it('should validate that the name is required', async () => {
+ await name.setValue('');
+ await name.trigger('blur');
+
+ expect(wrapper.findByText('This field is required').exists()).toBe(true);
+ expect(name.attributes('aria-invalid')).toBe('true');
+ });
+ });
+
+ describe('url input', () => {
+ let url;
+
+ beforeEach(() => {
+ url = wrapper.findByLabelText('External URL');
+ });
+
+ it('should emit changes to the url', async () => {
+ await url.setValue('https://example.com');
+ await url.trigger('blur');
+
+ expect(wrapper.emitted('change')).toEqual([
+ [{ name: '', externalUrl: 'https://example.com' }],
+ ]);
+ });
+
+ it('should validate that the url is required', async () => {
+ await url.setValue('example.com');
+ await url.trigger('blur');
+
+ expect(wrapper.findByText('The URL should start with http:// or https://').exists()).toBe(
+ true,
+ );
+ expect(url.attributes('aria-invalid')).toBe('true');
+ });
+ });
+
+ it('submits when the form does', async () => {
+ await wrapper.findByRole('form', { title: 'environment' }).trigger('submit');
+
+ expect(wrapper.emitted('submit')).toEqual([[]]);
+ });
+ });
+
+ it('shows a loading icon while loading', () => {
+ wrapper = createWrapper({ loading: true });
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/environments/environment_item_spec.js b/spec/frontend/environments/environment_item_spec.js
index 62806c9e44c..a568a7d5396 100644
--- a/spec/frontend/environments/environment_item_spec.js
+++ b/spec/frontend/environments/environment_item_spec.js
@@ -1,14 +1,21 @@
import { mount } from '@vue/test-utils';
import { cloneDeep } from 'lodash';
import { format } from 'timeago.js';
+import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
+import ActionsComponent from '~/environments/components/environment_actions.vue';
import DeleteComponent from '~/environments/components/environment_delete.vue';
+import ExternalUrlComponent from '~/environments/components/environment_external_url.vue';
import EnvironmentItem from '~/environments/components/environment_item.vue';
import PinComponent from '~/environments/components/environment_pin.vue';
+import RollbackComponent from '~/environments/components/environment_rollback.vue';
+import StopComponent from '~/environments/components/environment_stop.vue';
+import TerminalButtonComponent from '~/environments/components/environment_terminal_button.vue';
import { differenceInMilliseconds } from '~/lib/utils/datetime_utility';
import { environment, folder, tableData } from './mock_data';
describe('Environment item', () => {
let wrapper;
+ let tracking;
const factory = (options = {}) => {
// This destroys any wrappers created before a nested call to factory reassigns it
@@ -28,6 +35,12 @@ describe('Environment item', () => {
tableData,
},
});
+
+ tracking = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
});
const findAutoStop = () => wrapper.find('.js-auto-stop');
@@ -62,7 +75,7 @@ describe('Environment item', () => {
});
it('should not render the delete button', () => {
- expect(wrapper.find(DeleteComponent).exists()).toBe(false);
+ expect(wrapper.findComponent(DeleteComponent).exists()).toBe(false);
});
describe('With user information', () => {
@@ -176,12 +189,14 @@ describe('Environment item', () => {
});
it('should not render the auto-stop button', () => {
- expect(wrapper.find(PinComponent).exists()).toBe(false);
+ expect(wrapper.findComponent(PinComponent).exists()).toBe(false);
});
});
describe('With auto-stop date', () => {
describe('in the future', () => {
+ let pin;
+
const futureDate = new Date(Date.now() + 100000);
beforeEach(() => {
factory({
@@ -195,6 +210,9 @@ describe('Environment item', () => {
shouldShowAutoStopDate: true,
},
});
+ tracking = mockTracking(undefined, wrapper.element, jest.spyOn);
+
+ pin = wrapper.findComponent(PinComponent);
});
it('renders the date', () => {
@@ -202,7 +220,15 @@ describe('Environment item', () => {
});
it('should render the auto-stop button', () => {
- expect(wrapper.find(PinComponent).exists()).toBe(true);
+ expect(pin.exists()).toBe(true);
+ });
+
+ it('should tracks clicks', () => {
+ pin.trigger('click');
+
+ expect(tracking).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'environment_pin',
+ });
});
});
@@ -227,33 +253,104 @@ describe('Environment item', () => {
});
it('should not render the suto-stop button', () => {
- expect(wrapper.find(PinComponent).exists()).toBe(false);
+ expect(wrapper.findComponent(PinComponent).exists()).toBe(false);
});
});
});
});
describe('With manual actions', () => {
+ let actions;
+
+ beforeEach(() => {
+ actions = wrapper.findComponent(ActionsComponent);
+ });
+
it('should render actions component', () => {
- expect(wrapper.find('.js-manual-actions-container')).toBeDefined();
+ expect(actions.exists()).toBe(true);
+ });
+
+ it('should track clicks', () => {
+ actions.trigger('click');
+ expect(tracking).toHaveBeenCalledWith('_category_', 'click_dropdown', {
+ label: 'environment_actions',
+ });
});
});
describe('With external URL', () => {
+ let externalUrl;
+
+ beforeEach(() => {
+ externalUrl = wrapper.findComponent(ExternalUrlComponent);
+ });
+
it('should render external url component', () => {
- expect(wrapper.find('.js-external-url-container')).toBeDefined();
+ expect(externalUrl.exists()).toBe(true);
+ });
+
+ it('should track clicks', () => {
+ externalUrl.trigger('click');
+ expect(tracking).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'environment_url',
+ });
});
});
describe('With stop action', () => {
+ let stop;
+
+ beforeEach(() => {
+ stop = wrapper.findComponent(StopComponent);
+ });
+
it('should render stop action component', () => {
- expect(wrapper.find('.js-stop-component-container')).toBeDefined();
+ expect(stop.exists()).toBe(true);
+ });
+
+ it('should track clicks', () => {
+ stop.trigger('click');
+ expect(tracking).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'environment_stop',
+ });
});
});
describe('With retry action', () => {
+ let rollback;
+
+ beforeEach(() => {
+ rollback = wrapper.findComponent(RollbackComponent);
+ });
+
it('should render rollback component', () => {
- expect(wrapper.find('.js-rollback-component-container')).toBeDefined();
+ expect(rollback.exists()).toBe(true);
+ });
+
+ it('should track clicks', () => {
+ rollback.trigger('click');
+ expect(tracking).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'environment_rollback',
+ });
+ });
+ });
+
+ describe('With terminal path', () => {
+ let terminal;
+
+ beforeEach(() => {
+ terminal = wrapper.findComponent(TerminalButtonComponent);
+ });
+
+ it('should render terminal action component', () => {
+ expect(terminal.exists()).toBe(true);
+ });
+
+ it('should track clicks', () => {
+ triggerEvent(terminal.element);
+ expect(tracking).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'environment_terminal',
+ });
});
});
});
@@ -312,7 +409,17 @@ describe('Environment item', () => {
});
it('should render the delete button', () => {
- expect(wrapper.find(DeleteComponent).exists()).toBe(true);
+ expect(wrapper.findComponent(DeleteComponent).exists()).toBe(true);
+ });
+
+ it('should trigger a tracking event', async () => {
+ tracking = mockTracking(undefined, wrapper.element, jest.spyOn);
+
+ await wrapper.findComponent(DeleteComponent).trigger('click');
+
+ expect(tracking).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'environment_delete',
+ });
});
});
});
diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js
index 1abdeff614c..dc176001943 100644
--- a/spec/frontend/environments/environments_app_spec.js
+++ b/spec/frontend/environments/environments_app_spec.js
@@ -1,4 +1,4 @@
-import { GlTabs } from '@gitlab/ui';
+import { GlTabs, GlAlert } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -7,7 +7,9 @@ import DeployBoard from '~/environments/components/deploy_board.vue';
import EmptyState from '~/environments/components/empty_state.vue';
import EnableReviewAppModal from '~/environments/components/enable_review_app_modal.vue';
import EnvironmentsApp from '~/environments/components/environments_app.vue';
+import { ENVIRONMENTS_SURVEY_DISMISSED_COOKIE_NAME } from '~/environments/constants';
import axios from '~/lib/utils/axios_utils';
+import { setCookie, getCookie, removeCookie } from '~/lib/utils/common_utils';
import * as urlUtils from '~/lib/utils/url_utility';
import { environment, folder } from './mock_data';
@@ -48,6 +50,7 @@ describe('Environment', () => {
const findNewEnvironmentButton = () => wrapper.findByTestId('new-environment');
const findEnvironmentsTabAvailable = () => wrapper.find('.js-environments-tab-available > a');
const findEnvironmentsTabStopped = () => wrapper.find('.js-environments-tab-stopped > a');
+ const findSurveyAlert = () => wrapper.find(GlAlert);
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -280,4 +283,49 @@ describe('Environment', () => {
expect(wrapper.findComponent(GlTabs).attributes('value')).toBe('1');
});
});
+
+ describe('survey alert', () => {
+ beforeEach(async () => {
+ mockRequest(200, { environments: [] });
+ await createWrapper(true);
+ });
+
+ afterEach(() => {
+ removeCookie(ENVIRONMENTS_SURVEY_DISMISSED_COOKIE_NAME);
+ });
+
+ describe('when the user has not dismissed the alert', () => {
+ it('shows the alert', () => {
+ expect(findSurveyAlert().exists()).toBe(true);
+ });
+
+ describe('when the user dismisses the alert', () => {
+ beforeEach(() => {
+ findSurveyAlert().vm.$emit('dismiss');
+ });
+
+ it('hides the alert', () => {
+ expect(findSurveyAlert().exists()).toBe(false);
+ });
+
+ it('persists the dismissal using a cookie', () => {
+ const cookieValue = getCookie(ENVIRONMENTS_SURVEY_DISMISSED_COOKIE_NAME);
+
+ expect(cookieValue).toBe('true');
+ });
+ });
+ });
+
+ describe('when the user has previously dismissed the alert', () => {
+ beforeEach(async () => {
+ setCookie(ENVIRONMENTS_SURVEY_DISMISSED_COOKIE_NAME, 'true');
+
+ await createWrapper(true);
+ });
+
+ it('does not show the alert', () => {
+ expect(findSurveyAlert().exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/environments/environments_detail_header_spec.js b/spec/frontend/environments/environments_detail_header_spec.js
new file mode 100644
index 00000000000..6334060c736
--- /dev/null
+++ b/spec/frontend/environments/environments_detail_header_spec.js
@@ -0,0 +1,238 @@
+import { GlSprintf } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import DeleteEnvironmentModal from '~/environments/components/delete_environment_modal.vue';
+import EnvironmentsDetailHeader from '~/environments/components/environments_detail_header.vue';
+import StopEnvironmentModal from '~/environments/components/stop_environment_modal.vue';
+import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
+import { createEnvironment } from './mock_data';
+
+describe('Environments detail header component', () => {
+ const cancelAutoStopPath = '/my-environment/cancel/path';
+ const terminalPath = '/my-environment/terminal/path';
+ const metricsPath = '/my-environment/metrics/path';
+ const updatePath = '/my-environment/edit/path';
+
+ let wrapper;
+
+ const findHeader = () => wrapper.findByRole('heading');
+ const findAutoStopsAt = () => wrapper.findByTestId('auto-stops-at');
+ const findCancelAutoStopAtButton = () => wrapper.findByTestId('cancel-auto-stop-button');
+ const findCancelAutoStopAtForm = () => wrapper.findByTestId('cancel-auto-stop-form');
+ const findTerminalButton = () => wrapper.findByTestId('terminal-button');
+ const findExternalUrlButton = () => wrapper.findByTestId('external-url-button');
+ const findMetricsButton = () => wrapper.findByTestId('metrics-button');
+ const findEditButton = () => wrapper.findByTestId('edit-button');
+ const findStopButton = () => wrapper.findByTestId('stop-button');
+ const findDestroyButton = () => wrapper.findByTestId('destroy-button');
+ const findStopEnvironmentModal = () => wrapper.findComponent(StopEnvironmentModal);
+ const findDeleteEnvironmentModal = () => wrapper.findComponent(DeleteEnvironmentModal);
+
+ const buttons = [
+ ['Cancel Auto Stop At', findCancelAutoStopAtButton],
+ ['Terminal', findTerminalButton],
+ ['External Url', findExternalUrlButton],
+ ['Metrics', findMetricsButton],
+ ['Edit', findEditButton],
+ ['Stop', findStopButton],
+ ['Destroy', findDestroyButton],
+ ];
+
+ const createWrapper = ({ props }) => {
+ wrapper = shallowMountExtended(EnvironmentsDetailHeader, {
+ stubs: {
+ GlSprintf,
+ TimeAgo,
+ },
+ propsData: {
+ canReadEnvironment: false,
+ canAdminEnvironment: false,
+ canUpdateEnvironment: false,
+ canStopEnvironment: false,
+ canDestroyEnvironment: false,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('default state with minimal access', () => {
+ beforeEach(() => {
+ createWrapper({ props: { environment: createEnvironment() } });
+ });
+
+ it('displays the environment name', () => {
+ expect(findHeader().text()).toBe('My environment');
+ });
+
+ it('does not display an auto stops at text', () => {
+ expect(findAutoStopsAt().exists()).toBe(false);
+ });
+
+ it.each(buttons)('does not display button: %s', (_, findSelector) => {
+ expect(findSelector().exists()).toBe(false);
+ });
+
+ it('does not display stop environment modal', () => {
+ expect(findStopEnvironmentModal().exists()).toBe(false);
+ });
+
+ it('does not display delete environment modal', () => {
+ expect(findDeleteEnvironmentModal().exists()).toBe(false);
+ });
+ });
+
+ describe('when auto stops at is enabled and environment is available', () => {
+ beforeEach(() => {
+ const now = new Date();
+ const tomorrow = new Date();
+ tomorrow.setDate(now.getDate() + 1);
+ createWrapper({
+ props: {
+ environment: createEnvironment({ autoStopAt: tomorrow.toISOString() }),
+ cancelAutoStopPath,
+ },
+ });
+ });
+
+ it('displays a text that describes when the environment is going to be stopped', () => {
+ expect(findAutoStopsAt().text()).toBe('Auto stops in 1 day');
+ });
+
+ it('displays a cancel auto stops at button with a form to make a post request', () => {
+ const button = findCancelAutoStopAtButton();
+ const form = findCancelAutoStopAtForm();
+ expect(form.attributes('action')).toBe(cancelAutoStopPath);
+ expect(form.attributes('method')).toBe('POST');
+ expect(button.props('icon')).toBe('thumbtack');
+ expect(button.attributes('type')).toBe('submit');
+ });
+
+ it('includes a csrf token', () => {
+ const input = findCancelAutoStopAtForm().find('input');
+ expect(input.attributes('name')).toBe('authenticity_token');
+ });
+ });
+
+ describe('when auto stops at is enabled and environment is unavailable (already stopped)', () => {
+ beforeEach(() => {
+ const now = new Date();
+ const tomorrow = new Date();
+ tomorrow.setDate(now.getDate() + 1);
+ createWrapper({
+ props: {
+ environment: createEnvironment({
+ autoStopAt: tomorrow.toISOString(),
+ isAvailable: false,
+ }),
+ cancelAutoStopPath,
+ },
+ });
+ });
+
+ it('does not display a text that describes when the environment is going to be stopped', () => {
+ expect(findAutoStopsAt().exists()).toBe(false);
+ });
+
+ it('does not display a cancel auto stop button', () => {
+ expect(findCancelAutoStopAtButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when has a terminal', () => {
+ beforeEach(() => {
+ createWrapper({
+ props: {
+ environment: createEnvironment({ hasTerminals: true }),
+ canAdminEnvironment: true,
+ terminalPath,
+ },
+ });
+ });
+
+ it('displays the terminal button with correct path', () => {
+ expect(findTerminalButton().attributes('href')).toBe(terminalPath);
+ });
+ });
+
+ describe('when has an external url enabled', () => {
+ const externalUrl = 'https://example.com/my-environment/external/url';
+
+ beforeEach(() => {
+ createWrapper({
+ props: {
+ environment: createEnvironment({ hasTerminals: true, externalUrl }),
+ canReadEnvironment: true,
+ },
+ });
+ });
+
+ it('displays the external url button with correct path', () => {
+ expect(findExternalUrlButton().attributes('href')).toBe(externalUrl);
+ });
+ });
+
+ describe('when metrics are enabled', () => {
+ beforeEach(() => {
+ createWrapper({
+ props: {
+ environment: createEnvironment(),
+ canReadEnvironment: true,
+ metricsPath,
+ },
+ });
+ });
+
+ it('displays the metrics button with correct path', () => {
+ expect(findMetricsButton().attributes('href')).toBe(metricsPath);
+ });
+ });
+
+ describe('when has all admin rights', () => {
+ beforeEach(() => {
+ createWrapper({
+ props: {
+ environment: createEnvironment(),
+ canReadEnvironment: true,
+ canAdminEnvironment: true,
+ canStopEnvironment: true,
+ canUpdateEnvironment: true,
+ updatePath,
+ },
+ });
+ });
+
+ it('displays the edit button with correct path', () => {
+ expect(findEditButton().attributes('href')).toBe(updatePath);
+ });
+
+ it('displays the stop button with correct icon', () => {
+ expect(findStopButton().attributes('icon')).toBe('stop');
+ });
+
+ it('displays stop environment modal', () => {
+ expect(findStopEnvironmentModal().exists()).toBe(true);
+ });
+ });
+
+ describe('when the environment is unavailable and user has destroy permissions', () => {
+ beforeEach(() => {
+ createWrapper({
+ props: {
+ environment: createEnvironment({ isAvailable: false }),
+ canDestroyEnvironment: true,
+ },
+ });
+ });
+
+ it('displays a delete button', () => {
+ expect(findDestroyButton().exists()).toBe(true);
+ });
+
+ it('displays delete environment modal', () => {
+ expect(findDeleteEnvironmentModal().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/environments/mock_data.js b/spec/frontend/environments/mock_data.js
index 9ba71b78c2f..a6d67c26304 100644
--- a/spec/frontend/environments/mock_data.js
+++ b/spec/frontend/environments/mock_data.js
@@ -71,6 +71,8 @@ const environment = {
state: 'stopped',
external_url: 'http://external.com',
environment_type: null,
+ can_stop: true,
+ terminal_path: '/terminal',
last_deployment: {
id: 66,
iid: 6,
@@ -301,4 +303,22 @@ const tableData = {
},
};
-export { environment, environmentsList, folder, serverData, tableData, deployBoardMockData };
+const createEnvironment = (data = {}) => ({
+ id: 1,
+ name: 'My environment',
+ externalUrl: 'my external url',
+ isAvailable: true,
+ hasTerminals: false,
+ autoStopAt: null,
+ ...data,
+});
+
+export {
+ environment,
+ environmentsList,
+ folder,
+ serverData,
+ tableData,
+ deployBoardMockData,
+ createEnvironment,
+};
diff --git a/spec/frontend/environments/new_environment_spec.js b/spec/frontend/environments/new_environment_spec.js
new file mode 100644
index 00000000000..f6d970e02d8
--- /dev/null
+++ b/spec/frontend/environments/new_environment_spec.js
@@ -0,0 +1,100 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import NewEnvironment from '~/environments/components/new_environment.vue';
+import createFlash from '~/flash';
+import axios from '~/lib/utils/axios_utils';
+import { visitUrl } from '~/lib/utils/url_utility';
+
+jest.mock('~/lib/utils/url_utility');
+jest.mock('~/flash');
+
+const DEFAULT_OPTS = {
+ provide: { projectEnvironmentsPath: '/projects/environments' },
+};
+
+describe('~/environments/components/new.vue', () => {
+ let wrapper;
+ let mock;
+ let name;
+ let url;
+ let form;
+
+ const createWrapper = (opts = {}) =>
+ mountExtended(NewEnvironment, {
+ ...DEFAULT_OPTS,
+ ...opts,
+ });
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ wrapper = createWrapper();
+ name = wrapper.findByLabelText('Name');
+ url = wrapper.findByLabelText('External URL');
+ form = wrapper.findByRole('form', { name: 'New environment' });
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ const showsLoading = () => wrapper.find(GlLoadingIcon).exists();
+
+ const submitForm = async (expected, response) => {
+ mock
+ .onPost(DEFAULT_OPTS.provide.projectEnvironmentsPath, {
+ name: expected.name,
+ external_url: expected.url,
+ })
+ .reply(...response);
+ await name.setValue(expected.name);
+ await url.setValue(expected.url);
+
+ await form.trigger('submit');
+ await waitForPromises();
+ };
+
+ it('sets the title to New environment', () => {
+ const header = wrapper.findByRole('heading', { name: 'New environment' });
+ expect(header.exists()).toBe(true);
+ });
+
+ it.each`
+ input | value
+ ${() => name} | ${'test'}
+ ${() => url} | ${'https://example.org'}
+ `('it changes the value of the input to $value', async ({ input, value }) => {
+ await input().setValue(value);
+
+ expect(input().element.value).toBe(value);
+ });
+
+ it('shows loader after form is submitted', async () => {
+ const expected = { name: 'test', url: 'https://google.ca' };
+
+ expect(showsLoading()).toBe(false);
+
+ await submitForm(expected, [200, { path: '/test' }]);
+
+ expect(showsLoading()).toBe(true);
+ });
+
+ it('submits the new environment on submit', async () => {
+ const expected = { name: 'test', url: 'https://google.ca' };
+
+ await submitForm(expected, [200, { path: '/test' }]);
+
+ expect(visitUrl).toHaveBeenCalledWith('/test');
+ });
+
+ it('shows errors on error', async () => {
+ const expected = { name: 'test', url: 'https://google.ca' };
+
+ await submitForm(expected, [400, { message: ['name taken'] }]);
+
+ expect(createFlash).toHaveBeenCalledWith({ message: 'name taken' });
+ expect(showsLoading()).toBe(false);
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js b/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js
index 02216370b79..07aa456e69e 100644
--- a/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js
+++ b/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js
@@ -66,15 +66,14 @@ describe('feature_flags/components/strategies/flexible_rollout.vue', () => {
});
it('emits a change when the stickiness value changes', async () => {
- stickinessSelect.setValue('USERID');
- await wrapper.vm.$nextTick();
+ await stickinessSelect.setValue('userId');
expect(wrapper.emitted('change')).toEqual([
[
{
parameters: {
rollout: flexibleRolloutStrategy.parameters.rollout,
groupId: PERCENT_ROLLOUT_GROUP_ID,
- stickiness: 'USERID',
+ stickiness: 'userId',
},
},
],
diff --git a/spec/frontend/feature_flags/mock_data.js b/spec/frontend/feature_flags/mock_data.js
index b5f09ac1957..4c40c2acf01 100644
--- a/spec/frontend/feature_flags/mock_data.js
+++ b/spec/frontend/feature_flags/mock_data.js
@@ -76,7 +76,7 @@ export const percentRolloutStrategy = {
export const flexibleRolloutStrategy = {
name: ROLLOUT_STRATEGY_FLEXIBLE_ROLLOUT,
- parameters: { rollout: '50', groupId: 'default', stickiness: 'DEFAULT' },
+ parameters: { rollout: '50', groupId: 'default', stickiness: 'default' },
scopes: [],
};
diff --git a/spec/frontend/fixtures/analytics.rb b/spec/frontend/fixtures/analytics.rb
new file mode 100644
index 00000000000..6d106dce166
--- /dev/null
+++ b/spec/frontend/fixtures/analytics.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'Analytics (JavaScript fixtures)', :sidekiq_inline do
+ include_context 'Analytics fixtures shared context'
+
+ let_it_be(:value_stream_id) { 'default' }
+
+ before(:all) do
+ clean_frontend_fixtures('projects/analytics/value_stream_analytics/')
+ end
+
+ before do
+ update_metrics
+ create_deployment
+ end
+
+ describe Projects::Analytics::CycleAnalytics::StagesController, type: :controller do
+ render_views
+
+ let(:params) { { namespace_id: group, project_id: project, value_stream_id: value_stream_id } }
+
+ before do
+ project.add_developer(user)
+
+ sign_in(user)
+ end
+
+ it 'projects/analytics/value_stream_analytics/stages' do
+ get(:index, params: params, format: :json)
+
+ expect(response).to be_successful
+ end
+ end
+
+ describe Projects::CycleAnalytics::EventsController, type: :controller do
+ render_views
+ let(:params) { { namespace_id: group, project_id: project, value_stream_id: value_stream_id } }
+
+ before do
+ project.add_developer(user)
+
+ sign_in(user)
+ end
+
+ Gitlab::Analytics::CycleAnalytics::DefaultStages.all.each do |stage|
+ it "projects/analytics/value_stream_analytics/events/#{stage[:name]}" do
+ get(stage[:name], params: params, format: :json)
+
+ expect(response).to be_successful
+ end
+ end
+ end
+
+ describe Projects::Analytics::CycleAnalytics::SummaryController, type: :controller do
+ render_views
+ let(:params) { { namespace_id: group, project_id: project, value_stream_id: value_stream_id } }
+
+ before do
+ project.add_developer(user)
+
+ sign_in(user)
+ end
+
+ it "projects/analytics/value_stream_analytics/summary" do
+ get(:show, params: params, format: :json)
+
+ expect(response).to be_successful
+ end
+ end
+end
diff --git a/spec/frontend/fixtures/api_markdown.rb b/spec/frontend/fixtures/api_markdown.rb
index 94db262e4fd..cb9a116f293 100644
--- a/spec/frontend/fixtures/api_markdown.rb
+++ b/spec/frontend/fixtures/api_markdown.rb
@@ -7,12 +7,17 @@ RSpec.describe API::MergeRequests, '(JavaScript fixtures)', type: :request do
include WikiHelpers
include JavaScriptFixturesHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, username: 'gitlab') }
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, :repository, group: group) }
- let_it_be(:project_wiki) { create(:project_wiki, user: user) }
+ let_it_be(:label) { create(:label, project: project, title: 'bug') }
+ let_it_be(:milestone) { create(:milestone, project: project, title: '1.1') }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let_it_be(:project_wiki) { create(:project_wiki, project: project, user: user) }
let(:project_wiki_page) { create(:wiki_page, wiki: project_wiki) }
diff --git a/spec/frontend/fixtures/api_markdown.yml b/spec/frontend/fixtures/api_markdown.yml
index 8d8c9a1d902..b581aac6aee 100644
--- a/spec/frontend/fixtures/api_markdown.yml
+++ b/spec/frontend/fixtures/api_markdown.yml
@@ -8,6 +8,14 @@
markdown: '_emphasized text_'
- name: inline_code
markdown: '`code`'
+- name: inline_diff
+ markdown: |-
+ * {-deleted-}
+ * {+added+}
+- name: subscript
+ markdown: H<sub>2</sub>O
+- name: superscript
+ markdown: 2<sup>8</sup> = 256
- name: strike
markdown: '~~del~~'
- name: horizontal_rule
@@ -68,6 +76,22 @@
1. list item 1
2. list item 2
3. list item 3
+- name: task_list
+ markdown: |-
+ * [x] hello
+ * [x] world
+ * [ ] example
+ * [ ] of nested
+ * [x] task list
+ * [ ] items
+- name: ordered_task_list
+ markdown: |-
+ 1. [x] hello
+ 2. [x] world
+ 3. [ ] example
+ 1. [ ] of nested
+ 1. [x] task list
+ 2. [ ] items
- name: image
markdown: '![alt text](https://gitlab.com/logo.png)'
- name: hard_break
@@ -86,4 +110,9 @@
|--------|------------|----------|
| cell | cell | cell |
| cell | cell | cell |
-
+- name: emoji
+ markdown: ':sparkles: :heart: :100:'
+- name: reference
+ context: project_wiki
+ markdown: |-
+ Hi @gitlab - thank you for reporting this ~bug (#1) we hope to fix it in %1.1 as part of !1
diff --git a/spec/frontend/fixtures/startup_css.rb b/spec/frontend/fixtures/startup_css.rb
index 003f7b768dd..be2ead756cf 100644
--- a/spec/frontend/fixtures/startup_css.rb
+++ b/spec/frontend/fixtures/startup_css.rb
@@ -10,8 +10,6 @@ RSpec.describe 'Startup CSS fixtures', type: :controller do
render_views
before(:all) do
- stub_feature_flags(combined_menu: true)
- stub_feature_flags(sidebar_refactor: true)
clean_frontend_fixtures('startup_css/')
end
@@ -23,17 +21,6 @@ RSpec.describe 'Startup CSS fixtures', type: :controller do
sign_in(user)
end
- it "startup_css/project-#{type}-legacy-menu.html" do
- stub_feature_flags(combined_menu: false)
-
- get :show, params: {
- namespace_id: project.namespace.to_param,
- id: project
- }
-
- expect(response).to be_successful
- end
-
it "startup_css/project-#{type}.html" do
get :show, params: {
namespace_id: project.namespace.to_param,
@@ -43,17 +30,6 @@ RSpec.describe 'Startup CSS fixtures', type: :controller do
expect(response).to be_successful
end
- it "startup_css/project-#{type}-legacy-sidebar.html" do
- stub_feature_flags(sidebar_refactor: false)
-
- get :show, params: {
- namespace_id: project.namespace.to_param,
- id: project
- }
-
- expect(response).to be_successful
- end
-
it "startup_css/project-#{type}-signed-out.html" do
sign_out(user)
diff --git a/spec/frontend/graphql_shared/utils_spec.js b/spec/frontend/graphql_shared/utils_spec.js
index 56bfb02ea4a..1732f24eeff 100644
--- a/spec/frontend/graphql_shared/utils_spec.js
+++ b/spec/frontend/graphql_shared/utils_spec.js
@@ -1,4 +1,5 @@
import {
+ isGid,
getIdFromGraphQLId,
convertToGraphQLId,
convertToGraphQLIds,
@@ -10,6 +11,16 @@ const mockType = 'Group';
const mockId = 12;
const mockGid = `gid://gitlab/Group/12`;
+describe('isGid', () => {
+ it('returns true if passed id is gid', () => {
+ expect(isGid(mockGid)).toBe(true);
+ });
+
+ it('returns false if passed id is not gid', () => {
+ expect(isGid(mockId)).toBe(false);
+ });
+});
+
describe('getIdFromGraphQLId', () => {
[
{
@@ -67,6 +78,10 @@ describe('convertToGraphQLId', () => {
`('throws TypeError with "$message" if a param is missing', ({ type, id, message }) => {
expect(() => convertToGraphQLId(type, id)).toThrow(new TypeError(message));
});
+
+ it('returns id as is if it follows the gid format', () => {
+ expect(convertToGraphQLId(mockType, mockGid)).toStrictEqual(mockGid);
+ });
});
describe('convertToGraphQLIds', () => {
diff --git a/spec/frontend/groups/components/group_item_spec.js b/spec/frontend/groups/components/group_item_spec.js
index 2369685f506..60d47895a95 100644
--- a/spec/frontend/groups/components/group_item_spec.js
+++ b/spec/frontend/groups/components/group_item_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import Vue from 'vue';
+import waitForPromises from 'helpers/wait_for_promises';
import GroupFolder from '~/groups/components/group_folder.vue';
import GroupItem from '~/groups/components/group_item.vue';
import ItemActions from '~/groups/components/item_actions.vue';
@@ -22,8 +22,7 @@ describe('GroupItemComponent', () => {
beforeEach(() => {
wrapper = createComponent();
-
- return Vue.nextTick();
+ return waitForPromises();
});
afterEach(() => {
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index 4bf3334ae6b..3f722c24dbb 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -166,6 +166,11 @@ describe('RepoEditor', () => {
expect(tabs).toHaveLength(1);
expect(tabs.at(0).text()).toBe('Edit');
});
+
+ it('does not get markdown extension by default', async () => {
+ await createComponent();
+ expect(vm.editor.projectPath).toBeUndefined();
+ });
});
describe('when file is markdown', () => {
@@ -213,6 +218,11 @@ describe('RepoEditor', () => {
});
expect(findTabs()).toHaveLength(0);
});
+
+ it('uses the markdown extension and sets it up correctly', async () => {
+ await createComponent({ activeFile });
+ expect(vm.editor.projectPath).toBe(vm.currentProjectId);
+ });
});
describe('when file is binary and not raw', () => {
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index 99ef6d9a7fb..bbd8463e685 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -3,21 +3,22 @@ import {
GlEmptyState,
GlLoadingIcon,
GlSearchBoxByClick,
- GlSprintf,
GlDropdown,
GlDropdownItem,
+ GlTable,
} from '@gitlab/ui';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { mount, createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
+import stubChildren from 'helpers/stub_children';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { STATUSES } from '~/import_entities/constants';
import ImportTable from '~/import_entities/import_groups/components/import_table.vue';
-import ImportTableRow from '~/import_entities/import_groups/components/import_table_row.vue';
+import ImportTargetCell from '~/import_entities/import_groups/components/import_target_cell.vue';
import importGroupsMutation from '~/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql';
-import setNewNameMutation from '~/import_entities/import_groups/graphql/mutations/set_new_name.mutation.graphql';
-import setTargetNamespaceMutation from '~/import_entities/import_groups/graphql/mutations/set_target_namespace.mutation.graphql';
+import setImportTargetMutation from '~/import_entities/import_groups/graphql/mutations/set_import_target.mutation.graphql';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
import { availableNamespacesFixture, generateFakeEntry } from '../graphql/fixtures';
@@ -41,10 +42,15 @@ describe('import table', () => {
];
const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 };
- const findImportAllButton = () => wrapper.find('h1').find(GlButton);
+ const findImportSelectedButton = () =>
+ wrapper.findAllComponents(GlButton).wrappers.find((w) => w.text() === 'Import selected');
const findPaginationDropdown = () => wrapper.findComponent(GlDropdown);
const findPaginationDropdownText = () => findPaginationDropdown().find({ ref: 'text' }).text();
+ // TODO: remove this ugly approach when
+ // issue: https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1531
+ const findTable = () => wrapper.vm.getTableRef();
+
const createComponent = ({ bulkImportSourceGroups }) => {
apolloProvider = createMockApollo([], {
Query: {
@@ -58,14 +64,17 @@ describe('import table', () => {
},
});
- wrapper = shallowMount(ImportTable, {
+ wrapper = mount(ImportTable, {
propsData: {
groupPathRegex: /.*/,
sourceUrl: SOURCE_URL,
+ groupUrlErrorMessage: 'Please choose a group URL with no special characters or spaces.',
},
stubs: {
- GlSprintf,
+ ...stubChildren(ImportTable),
+ GlSprintf: false,
GlDropdown: GlDropdownStub,
+ GlTable: false,
},
localVue,
apolloProvider,
@@ -115,7 +124,7 @@ describe('import table', () => {
});
await waitForPromises();
- expect(wrapper.findAll(ImportTableRow)).toHaveLength(FAKE_GROUPS.length);
+ expect(wrapper.findAll('tbody tr')).toHaveLength(FAKE_GROUPS.length);
});
it('does not render status string when result list is empty', async () => {
@@ -139,19 +148,32 @@ describe('import table', () => {
});
it.each`
- event | payload | mutation | variables
- ${'update-target-namespace'} | ${'new-namespace'} | ${setTargetNamespaceMutation} | ${{ sourceGroupId: FAKE_GROUP.id, targetNamespace: 'new-namespace' }}
- ${'update-new-name'} | ${'new-name'} | ${setNewNameMutation} | ${{ sourceGroupId: FAKE_GROUP.id, newName: 'new-name' }}
- ${'import-group'} | ${undefined} | ${importGroupsMutation} | ${{ sourceGroupIds: [FAKE_GROUP.id] }}
+ event | payload | mutation | variables
+ ${'update-target-namespace'} | ${'new-namespace'} | ${setImportTargetMutation} | ${{ sourceGroupId: FAKE_GROUP.id, targetNamespace: 'new-namespace', newName: 'group1' }}
+ ${'update-new-name'} | ${'new-name'} | ${setImportTargetMutation} | ${{ sourceGroupId: FAKE_GROUP.id, targetNamespace: 'root', newName: 'new-name' }}
`('correctly maps $event to mutation', async ({ event, payload, mutation, variables }) => {
jest.spyOn(apolloProvider.defaultClient, 'mutate');
- wrapper.find(ImportTableRow).vm.$emit(event, payload);
+ wrapper.find(ImportTargetCell).vm.$emit(event, payload);
await waitForPromises();
expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({
mutation,
variables,
});
});
+
+ it('invokes importGroups mutation when row button is clicked', async () => {
+ jest.spyOn(apolloProvider.defaultClient, 'mutate');
+ const triggerImportButton = wrapper
+ .findAllComponents(GlButton)
+ .wrappers.find((w) => w.text() === 'Import');
+
+ triggerImportButton.vm.$emit('click');
+ await waitForPromises();
+ expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [FAKE_GROUP.id] },
+ });
+ });
});
describe('pagination', () => {
@@ -279,16 +301,20 @@ describe('import table', () => {
});
});
- describe('import all button', () => {
- it('does not exists when no groups available', () => {
+ describe('bulk operations', () => {
+ it('import selected button is disabled when no groups selected', async () => {
createComponent({
- bulkImportSourceGroups: () => new Promise(() => {}),
+ bulkImportSourceGroups: () => ({
+ nodes: FAKE_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ }),
});
+ await waitForPromises();
- expect(findImportAllButton().exists()).toBe(false);
+ expect(findImportSelectedButton().props().disabled).toBe(true);
});
- it('exists when groups are available for import', async () => {
+ it('import selected button is enabled when groups were selected for import', async () => {
createComponent({
bulkImportSourceGroups: () => ({
nodes: FAKE_GROUPS,
@@ -296,16 +322,14 @@ describe('import table', () => {
}),
});
await waitForPromises();
+ wrapper.find(GlTable).vm.$emit('row-selected', [FAKE_GROUPS[0]]);
+ await nextTick();
- expect(findImportAllButton().exists()).toBe(true);
+ expect(findImportSelectedButton().props().disabled).toBe(false);
});
- it('counts only not-imported groups', async () => {
- const NEW_GROUPS = [
- generateFakeEntry({ id: 1, status: STATUSES.NONE }),
- generateFakeEntry({ id: 2, status: STATUSES.NONE }),
- generateFakeEntry({ id: 3, status: STATUSES.FINISHED }),
- ];
+ it('does not allow selecting already started groups', async () => {
+ const NEW_GROUPS = [generateFakeEntry({ id: 1, status: STATUSES.FINISHED })];
createComponent({
bulkImportSourceGroups: () => ({
@@ -315,17 +339,41 @@ describe('import table', () => {
});
await waitForPromises();
- expect(findImportAllButton().text()).toMatchInterpolatedText('Import 2 groups');
+ findTable().selectRow(0);
+ await nextTick();
+
+ expect(findImportSelectedButton().props().disabled).toBe(true);
});
- it('disables button when any group has validation errors', async () => {
+ it('does not allow selecting groups with validation errors', async () => {
const NEW_GROUPS = [
- generateFakeEntry({ id: 1, status: STATUSES.NONE }),
generateFakeEntry({
id: 2,
status: STATUSES.NONE,
- validation_errors: [{ field: 'new_name', message: 'test validation error' }],
+ validation_errors: [{ field: 'new_name', message: 'FAKE_VALIDATION_ERROR' }],
}),
+ ];
+
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: NEW_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ }),
+ });
+ await waitForPromises();
+
+ // TODO: remove this ugly approach when
+ // issue: https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1531
+ findTable().selectRow(0);
+ await nextTick();
+
+ expect(findImportSelectedButton().props().disabled).toBe(true);
+ });
+
+ it('invokes importGroups mutation when import selected button is clicked', async () => {
+ const NEW_GROUPS = [
+ generateFakeEntry({ id: 1, status: STATUSES.NONE }),
+ generateFakeEntry({ id: 2, status: STATUSES.NONE }),
generateFakeEntry({ id: 3, status: STATUSES.FINISHED }),
];
@@ -335,9 +383,19 @@ describe('import table', () => {
pageInfo: FAKE_PAGE_INFO,
}),
});
+ jest.spyOn(apolloProvider.defaultClient, 'mutate');
await waitForPromises();
- expect(findImportAllButton().props().disabled).toBe(true);
+ findTable().selectRow(0);
+ findTable().selectRow(1);
+ await nextTick();
+
+ findImportSelectedButton().vm.$emit('click');
+
+ expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [NEW_GROUPS[0].id, NEW_GROUPS[1].id] },
+ });
});
});
});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js b/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js
index 654a8fd00d3..8231297e594 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js
@@ -2,19 +2,13 @@ import { GlButton, GlDropdownItem, GlLink, GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
import ImportGroupDropdown from '~/import_entities/components/group_dropdown.vue';
import { STATUSES } from '~/import_entities/constants';
-import ImportTableRow from '~/import_entities/import_groups/components/import_table_row.vue';
-import addValidationErrorMutation from '~/import_entities/import_groups/graphql/mutations/add_validation_error.mutation.graphql';
-import removeValidationErrorMutation from '~/import_entities/import_groups/graphql/mutations/remove_validation_error.mutation.graphql';
-import groupAndProjectQuery from '~/import_entities/import_groups/graphql/queries/groupAndProject.query.graphql';
+import ImportTargetCell from '~/import_entities/import_groups/components/import_target_cell.vue';
import { availableNamespacesFixture } from '../graphql/fixtures';
Vue.use(VueApollo);
-const { i18n: I18N } = ImportTableRow;
-
const getFakeGroup = (status) => ({
web_url: 'https://fake.host/',
full_path: 'fake_group_1',
@@ -28,48 +22,23 @@ const getFakeGroup = (status) => ({
progress: { status },
});
-const EXISTING_GROUP_TARGET_NAMESPACE = 'existing-group';
-const EXISTING_GROUP_PATH = 'existing-path';
-const EXISTING_PROJECT_PATH = 'existing-project-path';
-
-describe('import table row', () => {
+describe('import target cell', () => {
let wrapper;
- let apolloProvider;
let group;
const findByText = (cmp, text) => {
return wrapper.findAll(cmp).wrappers.find((node) => node.text().indexOf(text) === 0);
};
- const findImportButton = () => findByText(GlButton, 'Import');
const findNameInput = () => wrapper.find(GlFormInput);
const findNamespaceDropdown = () => wrapper.find(ImportGroupDropdown);
const createComponent = (props) => {
- apolloProvider = createMockApollo([
- [
- groupAndProjectQuery,
- ({ fullPath }) => {
- const existingGroup =
- fullPath === `${EXISTING_GROUP_TARGET_NAMESPACE}/${EXISTING_GROUP_PATH}`
- ? { id: 1 }
- : null;
-
- const existingProject =
- fullPath === `${EXISTING_GROUP_TARGET_NAMESPACE}/${EXISTING_PROJECT_PATH}`
- ? { id: 1 }
- : null;
-
- return Promise.resolve({ data: { existingGroup, existingProject } });
- },
- ],
- ]);
-
- wrapper = shallowMount(ImportTableRow, {
- apolloProvider,
+ wrapper = shallowMount(ImportTargetCell, {
stubs: { ImportGroupDropdown },
propsData: {
availableNamespaces: availableNamespacesFixture,
groupPathRegex: /.*/,
+ groupUrlErrorMessage: 'Please choose a group URL with no special characters or spaces.',
...props,
},
});
@@ -86,14 +55,10 @@ describe('import table row', () => {
createComponent({ group });
});
- it.each`
- selector | sourceEvent | payload | event
- ${findNameInput} | ${'input'} | ${'demo'} | ${'update-new-name'}
- ${findImportButton} | ${'click'} | ${undefined} | ${'import-group'}
- `('invokes $event', ({ selector, sourceEvent, payload, event }) => {
- selector().vm.$emit(sourceEvent, payload);
- expect(wrapper.emitted(event)).toBeDefined();
- expect(wrapper.emitted(event)[0][0]).toBe(payload);
+ it('invokes $event', () => {
+ findNameInput().vm.$emit('input', 'demo');
+ expect(wrapper.emitted('update-new-name')).toBeDefined();
+ expect(wrapper.emitted('update-new-name')[0][0]).toBe('demo');
});
it('emits update-target-namespace when dropdown option is clicked', () => {
@@ -113,10 +78,6 @@ describe('import table row', () => {
createComponent({ group });
});
- it('renders Import button', () => {
- expect(findByText(GlButton, 'Import').exists()).toBe(true);
- });
-
it('renders namespace dropdown as not disabled', () => {
expect(findNamespaceDropdown().attributes('disabled')).toBe(undefined);
});
@@ -198,7 +159,9 @@ describe('import table row', () => {
groupPathRegex: /^[a-zA-Z]+$/,
});
- expect(wrapper.text()).toContain('Please choose a group URL with no special characters.');
+ expect(wrapper.text()).toContain(
+ 'Please choose a group URL with no special characters or spaces.',
+ );
});
it('reports invalid group name if relevant validation error exists', async () => {
@@ -221,101 +184,5 @@ describe('import table row', () => {
expect(wrapper.text()).toContain(FAKE_ERROR_MESSAGE);
});
-
- it('sets validation error when targetting existing group', async () => {
- const testGroup = getFakeGroup(STATUSES.NONE);
-
- createComponent({
- group: {
- ...testGroup,
- import_target: {
- target_namespace: EXISTING_GROUP_TARGET_NAMESPACE,
- new_name: EXISTING_GROUP_PATH,
- },
- },
- });
-
- jest.spyOn(wrapper.vm.$apollo, 'mutate');
-
- jest.runOnlyPendingTimers();
- await nextTick();
-
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: addValidationErrorMutation,
- variables: {
- field: 'new_name',
- message: I18N.NAME_ALREADY_EXISTS,
- sourceGroupId: testGroup.id,
- },
- });
- });
-
- it('sets validation error when targetting existing project', async () => {
- const testGroup = getFakeGroup(STATUSES.NONE);
-
- createComponent({
- group: {
- ...testGroup,
- import_target: {
- target_namespace: EXISTING_GROUP_TARGET_NAMESPACE,
- new_name: EXISTING_PROJECT_PATH,
- },
- },
- });
-
- jest.spyOn(wrapper.vm.$apollo, 'mutate');
-
- jest.runOnlyPendingTimers();
- await nextTick();
-
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: addValidationErrorMutation,
- variables: {
- field: 'new_name',
- message: I18N.NAME_ALREADY_EXISTS,
- sourceGroupId: testGroup.id,
- },
- });
- });
-
- it('clears validation error when target is updated', async () => {
- const testGroup = getFakeGroup(STATUSES.NONE);
-
- createComponent({
- group: {
- ...testGroup,
- import_target: {
- target_namespace: EXISTING_GROUP_TARGET_NAMESPACE,
- new_name: EXISTING_PROJECT_PATH,
- },
- },
- });
-
- jest.runOnlyPendingTimers();
- await nextTick();
-
- jest.spyOn(wrapper.vm.$apollo, 'mutate');
-
- await wrapper.setProps({
- group: {
- ...testGroup,
- import_target: {
- target_namespace: 'valid_namespace',
- new_name: 'valid_path',
- },
- },
- });
-
- jest.runOnlyPendingTimers();
- await nextTick();
-
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: removeValidationErrorMutation,
- variables: {
- field: 'new_name',
- sourceGroupId: testGroup.id,
- },
- });
- });
});
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
index ef83c9ebbc4..ec50dfd037f 100644
--- a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
@@ -12,12 +12,12 @@ import addValidationErrorMutation from '~/import_entities/import_groups/graphql/
import importGroupsMutation from '~/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql';
import removeValidationErrorMutation from '~/import_entities/import_groups/graphql/mutations/remove_validation_error.mutation.graphql';
import setImportProgressMutation from '~/import_entities/import_groups/graphql/mutations/set_import_progress.mutation.graphql';
-import setNewNameMutation from '~/import_entities/import_groups/graphql/mutations/set_new_name.mutation.graphql';
-import setTargetNamespaceMutation from '~/import_entities/import_groups/graphql/mutations/set_target_namespace.mutation.graphql';
+import setImportTargetMutation from '~/import_entities/import_groups/graphql/mutations/set_import_target.mutation.graphql';
import updateImportStatusMutation from '~/import_entities/import_groups/graphql/mutations/update_import_status.mutation.graphql';
import availableNamespacesQuery from '~/import_entities/import_groups/graphql/queries/available_namespaces.query.graphql';
import bulkImportSourceGroupQuery from '~/import_entities/import_groups/graphql/queries/bulk_import_source_group.query.graphql';
import bulkImportSourceGroupsQuery from '~/import_entities/import_groups/graphql/queries/bulk_import_source_groups.query.graphql';
+import groupAndProjectQuery from '~/import_entities/import_groups/graphql/queries/group_and_project.query.graphql';
import { StatusPoller } from '~/import_entities/import_groups/graphql/services/status_poller';
import axios from '~/lib/utils/axios_utils';
@@ -38,18 +38,29 @@ const FAKE_ENDPOINTS = {
jobs: '/fake_jobs',
};
+const FAKE_GROUP_AND_PROJECTS_QUERY_HANDLER = jest.fn().mockResolvedValue({
+ data: {
+ existingGroup: null,
+ existingProject: null,
+ },
+});
+
describe('Bulk import resolvers', () => {
let axiosMockAdapter;
let client;
const createClient = (extraResolverArgs) => {
- return createMockClient({
+ const mockedClient = createMockClient({
cache: new InMemoryCache({
fragmentMatcher: { match: () => true },
addTypename: false,
}),
resolvers: createResolvers({ endpoints: FAKE_ENDPOINTS, ...extraResolverArgs }),
});
+
+ mockedClient.setRequestHandler(groupAndProjectQuery, FAKE_GROUP_AND_PROJECTS_QUERY_HANDLER);
+
+ return mockedClient;
};
beforeEach(() => {
@@ -196,6 +207,12 @@ describe('Bulk import resolvers', () => {
const [statusPoller] = StatusPoller.mock.instances;
expect(statusPoller.startPolling).toHaveBeenCalled();
});
+
+ it('requests validation status when request completes', async () => {
+ expect(FAKE_GROUP_AND_PROJECTS_QUERY_HANDLER).not.toHaveBeenCalled();
+ jest.runOnlyPendingTimers();
+ expect(FAKE_GROUP_AND_PROJECTS_QUERY_HANDLER).toHaveBeenCalled();
+ });
});
it.each`
@@ -256,40 +273,49 @@ describe('Bulk import resolvers', () => {
});
});
- it('setTargetNamespaces updates group target namespace', async () => {
- const NEW_TARGET_NAMESPACE = 'target';
- const {
- data: {
- setTargetNamespace: {
- id: idInResponse,
- import_target: { target_namespace: namespaceInResponse },
+ describe('setImportTarget', () => {
+ it('updates group target namespace and name', async () => {
+ const NEW_TARGET_NAMESPACE = 'target';
+ const NEW_NAME = 'new';
+
+ const {
+ data: {
+ setImportTarget: {
+ id: idInResponse,
+ import_target: { target_namespace: namespaceInResponse, new_name: newNameInResponse },
+ },
},
- },
- } = await client.mutate({
- mutation: setTargetNamespaceMutation,
- variables: { sourceGroupId: GROUP_ID, targetNamespace: NEW_TARGET_NAMESPACE },
+ } = await client.mutate({
+ mutation: setImportTargetMutation,
+ variables: {
+ sourceGroupId: GROUP_ID,
+ targetNamespace: NEW_TARGET_NAMESPACE,
+ newName: NEW_NAME,
+ },
+ });
+
+ expect(idInResponse).toBe(GROUP_ID);
+ expect(namespaceInResponse).toBe(NEW_TARGET_NAMESPACE);
+ expect(newNameInResponse).toBe(NEW_NAME);
});
- expect(idInResponse).toBe(GROUP_ID);
- expect(namespaceInResponse).toBe(NEW_TARGET_NAMESPACE);
- });
+ it('invokes validation', async () => {
+ const NEW_TARGET_NAMESPACE = 'target';
+ const NEW_NAME = 'new';
- it('setNewName updates group target name', async () => {
- const NEW_NAME = 'new';
- const {
- data: {
- setNewName: {
- id: idInResponse,
- import_target: { new_name: nameInResponse },
+ await client.mutate({
+ mutation: setImportTargetMutation,
+ variables: {
+ sourceGroupId: GROUP_ID,
+ targetNamespace: NEW_TARGET_NAMESPACE,
+ newName: NEW_NAME,
},
- },
- } = await client.mutate({
- mutation: setNewNameMutation,
- variables: { sourceGroupId: GROUP_ID, newName: NEW_NAME },
- });
+ });
- expect(idInResponse).toBe(GROUP_ID);
- expect(nameInResponse).toBe(NEW_NAME);
+ expect(FAKE_GROUP_AND_PROJECTS_QUERY_HANDLER).toHaveBeenCalledWith({
+ fullPath: `${NEW_TARGET_NAMESPACE}/${NEW_NAME}`,
+ });
+ });
});
describe('importGroup', () => {
diff --git a/spec/frontend/integrations/edit/components/dynamic_field_spec.js b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
index 8784b3c2b00..da8a2f41c1b 100644
--- a/spec/frontend/integrations/edit/components/dynamic_field_spec.js
+++ b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
@@ -182,6 +182,19 @@ describe('DynamicField', () => {
expect(findGlFormGroup().find('small').html()).toContain(helpHTML);
});
+
+ it('strips unsafe HTML from the help text', () => {
+ const helpHTML =
+ '[<code>1</code> <iframe>2</iframe> <a href="javascript:alert(document.cookie)">3</a> <a href="foo" target="_blank">4</a>]';
+
+ createComponent({
+ help: helpHTML,
+ });
+
+ expect(findGlFormGroup().find('small').html()).toContain(
+ '[<code>1</code> <a>3</a> <a target="_blank" href="foo">4</a>]',
+ );
+ });
});
describe('label text', () => {
diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js
index cbce26762b1..ff602327592 100644
--- a/spec/frontend/integrations/edit/components/integration_form_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_spec.js
@@ -278,6 +278,7 @@ describe('IntegrationForm', () => {
<svg class="gl-icon">
<use></use>
</svg>
+ <a data-confirm="Are you sure?" data-method="delete" href="/settings/slack"></a>
</div>
`);
@@ -291,9 +292,14 @@ describe('IntegrationForm', () => {
});
const helpHtml = wrapper.findByTestId(mockTestId);
+ const helpLink = helpHtml.find('a');
expect(helpHtml.isVisible()).toBe(true);
expect(helpHtml.find('svg').isVisible()).toBe(true);
+ expect(helpLink.attributes()).toMatchObject({
+ 'data-confirm': 'Are you sure?',
+ 'data-method': 'delete',
+ });
});
});
});
diff --git a/spec/frontend/integrations/overrides/components/integration_overrides_spec.js b/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
new file mode 100644
index 00000000000..dbed236d7df
--- /dev/null
+++ b/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
@@ -0,0 +1,146 @@
+import { GlTable, GlLink, GlPagination } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import { DEFAULT_PER_PAGE } from '~/api';
+import createFlash from '~/flash';
+import IntegrationOverrides from '~/integrations/overrides/components/integration_overrides.vue';
+import axios from '~/lib/utils/axios_utils';
+import httpStatus from '~/lib/utils/http_status';
+import ProjectAvatar from '~/vue_shared/components/project_avatar.vue';
+
+jest.mock('~/flash');
+
+const mockOverrides = Array(DEFAULT_PER_PAGE * 3)
+ .fill(1)
+ .map((_, index) => ({
+ name: `test-proj-${index}`,
+ avatar_url: `avatar-${index}`,
+ full_path: `test-proj-${index}`,
+ full_name: `test-proj-${index}`,
+ }));
+
+describe('IntegrationOverrides', () => {
+ let wrapper;
+ let mockAxios;
+
+ const defaultProps = {
+ overridesPath: 'mock/overrides',
+ };
+
+ const createComponent = ({ mountFn = shallowMount } = {}) => {
+ wrapper = mountFn(IntegrationOverrides, {
+ propsData: defaultProps,
+ });
+ };
+
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+ mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.OK, mockOverrides, {
+ 'X-TOTAL': mockOverrides.length,
+ 'X-PAGE': 1,
+ });
+ });
+
+ afterEach(() => {
+ mockAxios.restore();
+ wrapper.destroy();
+ });
+
+ const findGlTable = () => wrapper.findComponent(GlTable);
+ const findPagination = () => wrapper.findComponent(GlPagination);
+ const findRowsAsModel = () =>
+ findGlTable()
+ .findAllComponents(GlLink)
+ .wrappers.map((link) => {
+ const avatar = link.findComponent(ProjectAvatar);
+
+ return {
+ href: link.attributes('href'),
+ avatarUrl: avatar.props('projectAvatarUrl'),
+ avatarName: avatar.props('projectName'),
+ text: link.text(),
+ };
+ });
+
+ describe('while loading', () => {
+ it('sets GlTable `busy` attribute to `true`', () => {
+ createComponent();
+
+ const table = findGlTable();
+ expect(table.exists()).toBe(true);
+ expect(table.attributes('busy')).toBe('true');
+ });
+ });
+
+ describe('when initial request is successful', () => {
+ it('sets GlTable `busy` attribute to `false`', async () => {
+ createComponent();
+ await waitForPromises();
+
+ const table = findGlTable();
+ expect(table.exists()).toBe(true);
+ expect(table.attributes('busy')).toBeFalsy();
+ });
+
+ describe('table template', () => {
+ beforeEach(async () => {
+ createComponent({ mountFn: mount });
+ await waitForPromises();
+ });
+
+ it('renders overrides as rows in table', () => {
+ expect(findRowsAsModel()).toEqual(
+ mockOverrides.map((x) => ({
+ href: x.full_path,
+ avatarUrl: x.avatar_url,
+ avatarName: x.name,
+ text: expect.stringContaining(x.full_name),
+ })),
+ );
+ });
+ });
+ });
+
+ describe('when request fails', () => {
+ beforeEach(async () => {
+ mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.INTERNAL_SERVER_ERROR);
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('calls createFlash', () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith({
+ message: IntegrationOverrides.i18n.defaultErrorMessage,
+ captureError: true,
+ error: expect.any(Error),
+ });
+ });
+ });
+
+ describe('pagination', () => {
+ it('triggers fetch when `input` event is emitted', async () => {
+ createComponent();
+ jest.spyOn(axios, 'get');
+ await waitForPromises();
+
+ await findPagination().vm.$emit('input', 2);
+ expect(axios.get).toHaveBeenCalledWith(defaultProps.overridesPath, {
+ params: { page: 2, per_page: DEFAULT_PER_PAGE },
+ });
+ });
+
+ it('does not render with <=1 page', async () => {
+ mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.OK, [mockOverrides[0]], {
+ 'X-TOTAL': 1,
+ 'X-PAGE': 1,
+ });
+
+ createComponent();
+ await waitForPromises();
+
+ expect(findPagination().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index b828b5d8a04..95b1c55b82d 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -6,6 +6,7 @@ import {
GlSprintf,
GlLink,
GlModal,
+ GlFormCheckboxGroup,
} from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { stubComponent } from 'helpers/stub_component';
@@ -15,7 +16,8 @@ import Api from '~/api';
import ExperimentTracking from '~/experimentation/experiment_tracking';
import InviteMembersModal from '~/invite_members/components/invite_members_modal.vue';
import MembersTokenSelect from '~/invite_members/components/members_token_select.vue';
-import { INVITE_MEMBERS_IN_COMMENT } from '~/invite_members/constants';
+import { INVITE_MEMBERS_IN_COMMENT, MEMBER_AREAS_OF_FOCUS } from '~/invite_members/constants';
+import eventHub from '~/invite_members/event_hub';
import axios from '~/lib/utils/axios_utils';
import httpStatus from '~/lib/utils/http_status';
import { apiPaths, membersApiResponse, invitationsApiResponse } from '../mock_data/api_responses';
@@ -32,7 +34,12 @@ const inviteeType = 'members';
const accessLevels = { Guest: 10, Reporter: 20, Developer: 30, Maintainer: 40, Owner: 50 };
const defaultAccessLevel = 10;
const inviteSource = 'unknown';
+const noSelectionAreasOfFocus = ['no_selection'];
const helpLink = 'https://example.com';
+const areasOfFocusOptions = [
+ { text: 'area1', value: 'area1' },
+ { text: 'area2', value: 'area2' },
+];
const user1 = { id: 1, name: 'Name One', username: 'one_1', avatar_url: '' };
const user2 = { id: 2, name: 'Name Two', username: 'one_2', avatar_url: '' };
@@ -58,7 +65,9 @@ const createComponent = (data = {}, props = {}) => {
isProject,
inviteeType,
accessLevels,
+ areasOfFocusOptions,
defaultAccessLevel,
+ noSelectionAreasOfFocus,
helpLink,
...props,
},
@@ -74,7 +83,7 @@ const createComponent = (data = {}, props = {}) => {
GlDropdownItem: true,
GlSprintf,
GlFormGroup: stubComponent(GlFormGroup, {
- props: ['state', 'invalidFeedback'],
+ props: ['state', 'invalidFeedback', 'description'],
}),
},
});
@@ -116,9 +125,12 @@ describe('InviteMembersModal', () => {
const findCancelButton = () => wrapper.findByTestId('cancel-button');
const findInviteButton = () => wrapper.findByTestId('invite-button');
const clickInviteButton = () => findInviteButton().vm.$emit('click');
+ const clickCancelButton = () => findCancelButton().vm.$emit('click');
const findMembersFormGroup = () => wrapper.findByTestId('members-form-group');
const membersFormGroupInvalidFeedback = () => findMembersFormGroup().props('invalidFeedback');
+ const membersFormGroupDescription = () => findMembersFormGroup().props('description');
const findMembersSelect = () => wrapper.findComponent(MembersTokenSelect);
+ const findAreaofFocusCheckBoxGroup = () => wrapper.findComponent(GlFormCheckboxGroup);
describe('rendering the modal', () => {
beforeEach(() => {
@@ -137,6 +149,10 @@ describe('InviteMembersModal', () => {
expect(findInviteButton().text()).toBe('Invite');
});
+ it('renders the Invite button modal without isLoading', () => {
+ expect(findInviteButton().props('loading')).toBe(false);
+ });
+
describe('rendering the access levels dropdown', () => {
it('sets the default dropdown text to the default access level name', () => {
expect(findDropdown().attributes('text')).toBe('Guest');
@@ -160,13 +176,29 @@ describe('InviteMembersModal', () => {
});
});
- describe('displaying the correct introText', () => {
+ describe('rendering the areas_of_focus', () => {
+ it('renders the areas_of_focus checkboxes', () => {
+ createComponent();
+
+ expect(findAreaofFocusCheckBoxGroup().props('options')).toBe(areasOfFocusOptions);
+ expect(findAreaofFocusCheckBoxGroup().exists()).toBe(true);
+ });
+
+ it('does not render the areas_of_focus checkboxes', () => {
+ createComponent({}, { areasOfFocusOptions: [] });
+
+ expect(findAreaofFocusCheckBoxGroup().exists()).toBe(false);
+ });
+ });
+
+ describe('displaying the correct introText and form group description', () => {
describe('when inviting to a project', () => {
describe('when inviting members', () => {
it('includes the correct invitee, type, and formatted name', () => {
createInviteMembersToProjectWrapper();
expect(findIntroText()).toBe("You're inviting members to the test name project.");
+ expect(membersFormGroupDescription()).toBe('Select members or type email addresses');
});
});
@@ -175,6 +207,7 @@ describe('InviteMembersModal', () => {
createInviteGroupToProjectWrapper();
expect(findIntroText()).toBe("You're inviting a group to the test name project.");
+ expect(membersFormGroupDescription()).toBe('');
});
});
});
@@ -185,6 +218,7 @@ describe('InviteMembersModal', () => {
createInviteMembersToGroupWrapper();
expect(findIntroText()).toBe("You're inviting members to the test name group.");
+ expect(membersFormGroupDescription()).toBe('Select members or type email addresses');
});
});
@@ -193,6 +227,7 @@ describe('InviteMembersModal', () => {
createInviteGroupToGroupWrapper();
expect(findIntroText()).toBe("You're inviting a group to the test name group.");
+ expect(membersFormGroupDescription()).toBe('');
});
});
});
@@ -210,6 +245,20 @@ describe('InviteMembersModal', () => {
"email 'email@example.com' does not match the allowed domains: example1.org";
const expectedSyntaxError = 'email contains an invalid email address';
+ it('calls the API with the expected focus data when an areas_of_focus checkbox is clicked', () => {
+ const spy = jest.spyOn(Api, 'addGroupMembersByUserId');
+ const expectedFocus = [areasOfFocusOptions[0].value];
+ createComponent({ newUsersToInvite: [user1] });
+
+ findAreaofFocusCheckBoxGroup().vm.$emit('input', expectedFocus);
+ clickInviteButton();
+
+ expect(spy).toHaveBeenCalledWith(
+ user1.id.toString(),
+ expect.objectContaining({ areas_of_focus: expectedFocus }),
+ );
+ });
+
describe('when inviting an existing user to group by user ID', () => {
const postData = {
user_id: '1,2',
@@ -217,6 +266,7 @@ describe('InviteMembersModal', () => {
expires_at: undefined,
invite_source: inviteSource,
format: 'json',
+ areas_of_focus: noSelectionAreasOfFocus,
};
describe('when member is added successfully', () => {
@@ -226,20 +276,34 @@ describe('InviteMembersModal', () => {
wrapper.vm.$toast = { show: jest.fn() };
jest.spyOn(Api, 'addGroupMembersByUserId').mockResolvedValue({ data: postData });
jest.spyOn(wrapper.vm, 'showToastMessageSuccess');
+ });
+
+ it('includes the non-default selected areas of focus', () => {
+ const focus = ['abc'];
+ const updatedPostData = { ...postData, areas_of_focus: focus };
+ wrapper.setData({ selectedAreasOfFocus: focus });
clickInviteButton();
+
+ expect(Api.addGroupMembersByUserId).toHaveBeenCalledWith(id, updatedPostData);
});
- it('calls Api addGroupMembersByUserId with the correct params', async () => {
- await waitForPromises;
+ describe('when triggered from regular mounting', () => {
+ beforeEach(() => {
+ clickInviteButton();
+ });
- expect(Api.addGroupMembersByUserId).toHaveBeenCalledWith(id, postData);
- });
+ it('sets isLoading on the Invite button when it is clicked', () => {
+ expect(findInviteButton().props('loading')).toBe(true);
+ });
- it('displays the successful toastMessage', async () => {
- await waitForPromises;
+ it('calls Api addGroupMembersByUserId with the correct params', () => {
+ expect(Api.addGroupMembersByUserId).toHaveBeenCalledWith(id, postData);
+ });
- expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled();
+ it('displays the successful toastMessage', () => {
+ expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled();
+ });
});
});
@@ -260,6 +324,51 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe('Member already exists');
expect(findMembersFormGroup().props('state')).toBe(false);
expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(findInviteButton().props('loading')).toBe(false);
+ });
+
+ describe('clearing the invalid state and message', () => {
+ beforeEach(async () => {
+ mockMembersApi(httpStatus.CONFLICT, membersApiResponse.MEMBER_ALREADY_EXISTS);
+
+ clickInviteButton();
+
+ await waitForPromises();
+ });
+
+ it('clears the error when the list of members to invite is cleared', async () => {
+ expect(membersFormGroupInvalidFeedback()).toBe('Member already exists');
+ expect(findMembersFormGroup().props('state')).toBe(false);
+ expect(findMembersSelect().props('validationState')).toBe(false);
+
+ findMembersSelect().vm.$emit('clear');
+
+ await wrapper.vm.$nextTick();
+
+ expect(membersFormGroupInvalidFeedback()).toBe('');
+ expect(findMembersFormGroup().props('state')).not.toBe(false);
+ expect(findMembersSelect().props('validationState')).not.toBe(false);
+ });
+
+ it('clears the error when the cancel button is clicked', async () => {
+ clickCancelButton();
+
+ await wrapper.vm.$nextTick();
+
+ expect(membersFormGroupInvalidFeedback()).toBe('');
+ expect(findMembersFormGroup().props('state')).not.toBe(false);
+ expect(findMembersSelect().props('validationState')).not.toBe(false);
+ });
+
+ it('clears the error when the modal is hidden', async () => {
+ wrapper.findComponent(GlModal).vm.$emit('hide');
+
+ await wrapper.vm.$nextTick();
+
+ expect(membersFormGroupInvalidFeedback()).toBe('');
+ expect(findMembersFormGroup().props('state')).not.toBe(false);
+ expect(findMembersSelect().props('validationState')).not.toBe(false);
+ });
});
it('clears the invalid state and message once the list of members to invite is cleared', async () => {
@@ -272,6 +381,7 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe('Member already exists');
expect(findMembersFormGroup().props('state')).toBe(false);
expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(findInviteButton().props('loading')).toBe(false);
findMembersSelect().vm.$emit('clear');
@@ -280,6 +390,7 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe('');
expect(findMembersFormGroup().props('state')).not.toBe(false);
expect(findMembersSelect().props('validationState')).not.toBe(false);
+ expect(findInviteButton().props('loading')).toBe(false);
});
it('displays the generic error for http server error', async () => {
@@ -336,6 +447,7 @@ describe('InviteMembersModal', () => {
expires_at: undefined,
email: 'email@example.com',
invite_source: inviteSource,
+ areas_of_focus: noSelectionAreasOfFocus,
format: 'json',
};
@@ -346,16 +458,30 @@ describe('InviteMembersModal', () => {
wrapper.vm.$toast = { show: jest.fn() };
jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({ data: postData });
jest.spyOn(wrapper.vm, 'showToastMessageSuccess');
+ });
+
+ it('includes the non-default selected areas of focus', () => {
+ const focus = ['abc'];
+ const updatedPostData = { ...postData, areas_of_focus: focus };
+ wrapper.setData({ selectedAreasOfFocus: focus });
clickInviteButton();
- });
- it('calls Api inviteGroupMembersByEmail with the correct params', () => {
- expect(Api.inviteGroupMembersByEmail).toHaveBeenCalledWith(id, postData);
+ expect(Api.inviteGroupMembersByEmail).toHaveBeenCalledWith(id, updatedPostData);
});
- it('displays the successful toastMessage', () => {
- expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled();
+ describe('when triggered from regular mounting', () => {
+ beforeEach(() => {
+ clickInviteButton();
+ });
+
+ it('calls Api inviteGroupMembersByEmail with the correct params', () => {
+ expect(Api.inviteGroupMembersByEmail).toHaveBeenCalledWith(id, postData);
+ });
+
+ it('displays the successful toastMessage', () => {
+ expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled();
+ });
});
});
@@ -375,6 +501,7 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(findInviteButton().props('loading')).toBe(false);
});
it('displays the restricted email error when restricted email is invited', async () => {
@@ -386,6 +513,7 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toContain(expectedEmailRestrictedError);
expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(findInviteButton().props('loading')).toBe(false);
});
it('displays the successful toast message when email has already been invited', async () => {
@@ -446,6 +574,7 @@ describe('InviteMembersModal', () => {
access_level: defaultAccessLevel,
expires_at: undefined,
invite_source: inviteSource,
+ areas_of_focus: noSelectionAreasOfFocus,
format: 'json',
};
@@ -482,7 +611,7 @@ describe('InviteMembersModal', () => {
});
it('calls Apis with the invite source passed through to openModal', () => {
- wrapper.vm.openModal({ inviteeType: 'members', source: '_invite_source_' });
+ eventHub.$emit('openModal', { inviteeType: 'members', source: '_invite_source_' });
clickInviteButton();
@@ -548,9 +677,9 @@ describe('InviteMembersModal', () => {
describe('when sharing the group fails', () => {
beforeEach(() => {
- createComponent({ groupToBeSharedWith: sharedGroup });
+ createInviteGroupToGroupWrapper();
- wrapper.setData({ inviteeType: 'group' });
+ wrapper.setData({ groupToBeSharedWith: sharedGroup });
wrapper.vm.$toast = { show: jest.fn() };
jest
@@ -560,10 +689,9 @@ describe('InviteMembersModal', () => {
clickInviteButton();
});
- it('displays the generic error message', async () => {
- await waitForPromises();
-
+ it('displays the generic error message', () => {
expect(membersFormGroupInvalidFeedback()).toBe('Something went wrong');
+ expect(membersFormGroupDescription()).toBe('');
});
});
});
@@ -577,7 +705,7 @@ describe('InviteMembersModal', () => {
});
it('tracks the invite', () => {
- wrapper.vm.openModal({ inviteeType: 'members', source: INVITE_MEMBERS_IN_COMMENT });
+ eventHub.$emit('openModal', { inviteeType: 'members', source: INVITE_MEMBERS_IN_COMMENT });
clickInviteButton();
@@ -586,19 +714,37 @@ describe('InviteMembersModal', () => {
});
it('does not track invite for unknown source', () => {
- wrapper.vm.openModal({ inviteeType: 'members', source: 'unknown' });
+ eventHub.$emit('openModal', { inviteeType: 'members', source: 'unknown' });
clickInviteButton();
- expect(ExperimentTracking).not.toHaveBeenCalled();
+ expect(ExperimentTracking).not.toHaveBeenCalledWith(INVITE_MEMBERS_IN_COMMENT);
});
it('does not track invite undefined source', () => {
- wrapper.vm.openModal({ inviteeType: 'members' });
+ eventHub.$emit('openModal', { inviteeType: 'members' });
+
+ clickInviteButton();
+
+ expect(ExperimentTracking).not.toHaveBeenCalledWith(INVITE_MEMBERS_IN_COMMENT);
+ });
+
+ it('tracks the view for areas_of_focus', () => {
+ eventHub.$emit('openModal', { inviteeType: 'members' });
+
+ expect(ExperimentTracking).toHaveBeenCalledWith(MEMBER_AREAS_OF_FOCUS.name);
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith(MEMBER_AREAS_OF_FOCUS.view);
+ });
+
+ it('tracks the invite for areas_of_focus', () => {
+ eventHub.$emit('openModal', { inviteeType: 'members' });
clickInviteButton();
- expect(ExperimentTracking).not.toHaveBeenCalled();
+ expect(ExperimentTracking).toHaveBeenCalledWith(MEMBER_AREAS_OF_FOCUS.name);
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith(
+ MEMBER_AREAS_OF_FOCUS.submit,
+ );
});
});
});
diff --git a/spec/frontend/invite_members/components/members_token_select_spec.js b/spec/frontend/invite_members/components/members_token_select_spec.js
index 12db7e42464..196a716d08c 100644
--- a/spec/frontend/invite_members/components/members_token_select_spec.js
+++ b/spec/frontend/invite_members/components/members_token_select_spec.js
@@ -12,11 +12,12 @@ const user1 = { id: 1, name: 'John Smith', username: 'one_1', avatar_url: '' };
const user2 = { id: 2, name: 'Jane Doe', username: 'two_2', avatar_url: '' };
const allUsers = [user1, user2];
-const createComponent = () => {
+const createComponent = (props) => {
return shallowMount(MembersTokenSelect, {
propsData: {
ariaLabelledby: label,
placeholder,
+ ...props,
},
stubs: {
GlTokenSelector: stubComponent(GlTokenSelector),
@@ -27,11 +28,6 @@ const createComponent = () => {
describe('MembersTokenSelect', () => {
let wrapper;
- beforeEach(() => {
- jest.spyOn(UserApi, 'getUsers').mockResolvedValue({ data: allUsers });
- wrapper = createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
wrapper = null;
@@ -41,6 +37,8 @@ describe('MembersTokenSelect', () => {
describe('rendering the token-selector component', () => {
it('renders with the correct props', () => {
+ wrapper = createComponent();
+
const expectedProps = {
ariaLabelledby: label,
placeholder,
@@ -51,6 +49,11 @@ describe('MembersTokenSelect', () => {
});
describe('users', () => {
+ beforeEach(() => {
+ jest.spyOn(UserApi, 'getUsers').mockResolvedValue({ data: allUsers });
+ wrapper = createComponent();
+ });
+
describe('when input is focused for the first time (modal auto-focus)', () => {
it('does not call the API', async () => {
findTokenSelector().vm.$emit('focus');
@@ -90,10 +93,10 @@ describe('MembersTokenSelect', () => {
await waitForPromises();
- expect(UserApi.getUsers).toHaveBeenCalledWith(
- searchParam,
- wrapper.vm.$options.queryOptions,
- );
+ expect(UserApi.getUsers).toHaveBeenCalledWith(searchParam, {
+ active: true,
+ exclude_internal: true,
+ });
expect(tokenSelector.props('hideDropdownWithNoItems')).toBe(false);
});
@@ -134,6 +137,8 @@ describe('MembersTokenSelect', () => {
describe('when text input is blurred', () => {
it('clears text input', async () => {
+ wrapper = createComponent();
+
const tokenSelector = findTokenSelector();
tokenSelector.vm.$emit('blur');
@@ -143,4 +148,33 @@ describe('MembersTokenSelect', () => {
expect(tokenSelector.props('hideDropdownWithNoItems')).toBe(false);
});
});
+
+ describe('when component is mounted for a group using a saml provider', () => {
+ const searchParam = 'name';
+ const samlProviderId = 123;
+ let resolveApiRequest;
+
+ beforeEach(() => {
+ jest.spyOn(UserApi, 'getUsers').mockImplementation(
+ () =>
+ new Promise((resolve) => {
+ resolveApiRequest = resolve;
+ }),
+ );
+
+ wrapper = createComponent({ filterId: samlProviderId, usersFilter: 'saml_provider_id' });
+
+ findTokenSelector().vm.$emit('text-input', searchParam);
+ });
+
+ it('calls the API with the saml provider ID param', () => {
+ resolveApiRequest({ data: allUsers });
+
+ expect(UserApi.getUsers).toHaveBeenCalledWith(searchParam, {
+ active: true,
+ exclude_internal: true,
+ saml_provider_id: samlProviderId,
+ });
+ });
+ });
});
diff --git a/spec/frontend/issue_show/components/app_spec.js b/spec/frontend/issue_show/components/app_spec.js
index 4c06f2dca1b..babe3a66578 100644
--- a/spec/frontend/issue_show/components/app_spec.js
+++ b/spec/frontend/issue_show/components/app_spec.js
@@ -2,7 +2,6 @@ import { GlIntersectionObserver } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
-import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
import '~/behaviors/markdown/render_gfm';
import IssuableApp from '~/issue_show/components/app.vue';
import DescriptionComponent from '~/issue_show/components/description.vue';
@@ -30,8 +29,6 @@ jest.mock('~/issue_show/event_hub');
const REALTIME_REQUEST_STACK = [initialRequest, secondRequest];
describe('Issuable output', () => {
- useMockIntersectionObserver();
-
let mock;
let realtimeRequestCount = 0;
let wrapper;
diff --git a/spec/frontend/issue_show/components/fields/type_spec.js b/spec/frontend/issue_show/components/fields/type_spec.js
index 0c8af60d50d..fac745716d7 100644
--- a/spec/frontend/issue_show/components/fields/type_spec.js
+++ b/spec/frontend/issue_show/components/fields/type_spec.js
@@ -1,4 +1,4 @@
-import { GlFormGroup, GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlFormGroup, GlDropdown, GlDropdownItem, GlIcon } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -35,6 +35,9 @@ describe('Issue type field component', () => {
const findTypeFromGroup = () => wrapper.findComponent(GlFormGroup);
const findTypeFromDropDown = () => wrapper.findComponent(GlDropdown);
const findTypeFromDropDownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findTypeFromDropDownItemAt = (at) => findTypeFromDropDownItems().at(at);
+ const findTypeFromDropDownItemIconAt = (at) =>
+ findTypeFromDropDownItems().at(at).findComponent(GlIcon);
const createComponent = ({ data } = {}) => {
fakeApollo = createMockApollo([], mockResolvers);
@@ -60,6 +63,15 @@ describe('Issue type field component', () => {
wrapper.destroy();
});
+ it.each`
+ at | text | icon
+ ${0} | ${IssuableTypes[0].text} | ${IssuableTypes[0].icon}
+ ${1} | ${IssuableTypes[1].text} | ${IssuableTypes[1].icon}
+ `(`renders the issue type $text with an icon in the dropdown`, ({ at, text, icon }) => {
+ expect(findTypeFromDropDownItemIconAt(at).attributes('name')).toBe(icon);
+ expect(findTypeFromDropDownItemAt(at).text()).toBe(text);
+ });
+
it('renders a form group with the correct label', () => {
expect(findTypeFromGroup().attributes('label')).toBe(i18n.label);
});
diff --git a/spec/frontend/issue_show/issue_spec.js b/spec/frontend/issue_show/issue_spec.js
index d043693b863..76989413edb 100644
--- a/spec/frontend/issue_show/issue_spec.js
+++ b/spec/frontend/issue_show/issue_spec.js
@@ -1,5 +1,4 @@
import MockAdapter from 'axios-mock-adapter';
-import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
import waitForPromises from 'helpers/wait_for_promises';
import { initIssuableApp } from '~/issue_show/issue';
import * as parseData from '~/issue_show/utils/parse_data';
@@ -10,8 +9,6 @@ import { appProps } from './mock_data/mock_data';
const mock = new MockAdapter(axios);
mock.onGet().reply(200);
-useMockIntersectionObserver();
-
jest.mock('~/lib/utils/poll');
const setupHTML = (initialData) => {
diff --git a/spec/frontend/issues_list/components/issuables_list_app_spec.js b/spec/frontend/issues_list/components/issuables_list_app_spec.js
index 86112dad444..5ef2a2e0525 100644
--- a/spec/frontend/issues_list/components/issuables_list_app_spec.js
+++ b/spec/frontend/issues_list/components/issuables_list_app_spec.js
@@ -6,6 +6,7 @@ import {
import { shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
@@ -27,11 +28,6 @@ const TEST_ENDPOINT = '/issues';
const TEST_CREATE_ISSUES_PATH = '/createIssue';
const TEST_SVG_PATH = '/emptySvg';
-const setUrl = (query) => {
- window.location.href = `${TEST_LOCATION}${query}`;
- window.location.search = query;
-};
-
const MOCK_ISSUES = Array(PAGE_SIZE_MANUAL)
.fill(0)
.map((_, i) => ({
@@ -40,7 +36,6 @@ const MOCK_ISSUES = Array(PAGE_SIZE_MANUAL)
}));
describe('Issuables list component', () => {
- let oldLocation;
let mockAxios;
let wrapper;
let apiSpy;
@@ -75,19 +70,13 @@ describe('Issuables list component', () => {
beforeEach(() => {
mockAxios = new MockAdapter(axios);
- oldLocation = window.location;
- Object.defineProperty(window, 'location', {
- writable: true,
- value: { href: '', search: '' },
- });
- window.location.href = TEST_LOCATION;
+ setWindowLocation(TEST_LOCATION);
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
mockAxios.restore();
- window.location = oldLocation;
});
describe('with failed issues response', () => {
@@ -314,7 +303,7 @@ describe('Issuables list component', () => {
'?assignee_username=root&author_username=root&confidential=yes&label_name%5B%5D=Aquapod&label_name%5B%5D=Astro&milestone_title=v3.0&my_reaction_emoji=airplane&scope=all&sort=priority&state=opened&weight=0&not[label_name][]=Afterpod&not[milestone_title][]=13';
beforeEach(() => {
- setUrl(query);
+ setWindowLocation(query);
setupApiMock(() => [200, MOCK_ISSUES.slice(0)]);
factory({ sortKey: 'milestone_due_desc' });
@@ -358,7 +347,7 @@ describe('Issuables list component', () => {
'?assignee_username=root&author_username=root&confidential=yes&label_name%5B%5D=Aquapod&label_name%5B%5D=Astro&milestone_title=v3.0&my_reaction_emoji=airplane&scope=all&sort=priority&state=opened&weight=0&page=3';
beforeEach(() => {
- setUrl(query);
+ setWindowLocation(query);
setupApiMock(() => [200, MOCK_ISSUES.slice(0)]);
factory({ sortKey: 'milestone_due_desc' });
@@ -387,7 +376,7 @@ describe('Issuables list component', () => {
describe('with hash in window.location', () => {
beforeEach(() => {
- window.location.href = `${TEST_LOCATION}#stuff`;
+ setWindowLocation(`${TEST_LOCATION}#stuff`);
setupApiMock(() => [200, MOCK_ISSUES.slice(0)]);
factory();
return waitForPromises();
@@ -422,7 +411,7 @@ describe('Issuables list component', () => {
describe('with query in window location', () => {
beforeEach(() => {
- window.location.search = '?weight=Any';
+ setWindowLocation('?weight=Any');
factory();
@@ -436,7 +425,7 @@ describe('Issuables list component', () => {
describe('with closed state', () => {
beforeEach(() => {
- window.location.search = '?state=closed';
+ setWindowLocation('?state=closed');
factory();
@@ -450,7 +439,7 @@ describe('Issuables list component', () => {
describe('with all state', () => {
beforeEach(() => {
- window.location.search = '?state=all';
+ setWindowLocation('?state=all');
factory();
@@ -565,7 +554,7 @@ describe('Issuables list component', () => {
});
it('sets value according to query', () => {
- setUrl(query);
+ setWindowLocation(query);
factory({ type: 'jira' });
@@ -583,7 +572,7 @@ describe('Issuables list component', () => {
it('sets value according to query', () => {
const query = '?search=free+text';
- setUrl(query);
+ setWindowLocation(query);
factory({ type: 'jira' });
diff --git a/spec/frontend/issues_list/components/issue_card_time_info_spec.js b/spec/frontend/issues_list/components/issue_card_time_info_spec.js
index 634687e77ab..d195c159cbb 100644
--- a/spec/frontend/issues_list/components/issue_card_time_info_spec.js
+++ b/spec/frontend/issues_list/components/issue_card_time_info_spec.js
@@ -24,6 +24,7 @@ describe('IssuesListApp component', () => {
const findDueDate = () => wrapper.find('[data-testid="issuable-due-date"]');
const mountComponent = ({
+ closedAt = null,
dueDate = issue.dueDate,
milestoneDueDate = issue.milestone.dueDate,
milestoneStartDate = issue.milestone.startDate,
@@ -37,6 +38,7 @@ describe('IssuesListApp component', () => {
dueDate: milestoneDueDate,
startDate: milestoneStartDate,
},
+ closedAt,
dueDate,
},
},
@@ -87,10 +89,23 @@ describe('IssuesListApp component', () => {
});
describe('when in the past', () => {
- it('renders in red', () => {
- wrapper = mountComponent({ dueDate: new Date('2020-10-10') });
+ describe('when issue is open', () => {
+ it('renders in red', () => {
+ wrapper = mountComponent({ dueDate: new Date('2020-10-10') });
- expect(findDueDate().classes()).toContain('gl-text-red-500');
+ expect(findDueDate().classes()).toContain('gl-text-red-500');
+ });
+ });
+
+ describe('when issue is closed', () => {
+ it('does not render in red', () => {
+ wrapper = mountComponent({
+ dueDate: new Date('2020-10-10'),
+ closedAt: '2020-09-05T13:06:25Z',
+ });
+
+ expect(findDueDate().classes()).not.toContain('gl-text-red-500');
+ });
});
});
});
diff --git a/spec/frontend/issues_list/components/issues_list_app_spec.js b/spec/frontend/issues_list/components/issues_list_app_spec.js
index 846236e1fb5..0cb1092135f 100644
--- a/spec/frontend/issues_list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues_list/components/issues_list_app_spec.js
@@ -7,6 +7,7 @@ import VueApollo from 'vue-apollo';
import getIssuesQuery from 'ee_else_ce/issues_list/queries/get_issues.query.graphql';
import getIssuesCountQuery from 'ee_else_ce/issues_list/queries/get_issues_count.query.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import {
@@ -17,7 +18,7 @@ import {
getIssuesCountQueryResponse,
} from 'jest/issues_list/mock_data';
import createFlash from '~/flash';
-import { convertToGraphQLId } from '~/graphql_shared/utils';
+import { convertToGraphQLId, getIdFromGraphQLId } from '~/graphql_shared/utils';
import CsvImportExportButtons from '~/issuable/components/csv_import_export_buttons.vue';
import IssuableByEmail from '~/issuable/components/issuable_by_email.vue';
import IssuableList from '~/issuable_list/components/issuable_list_root.vue';
@@ -35,6 +36,7 @@ import {
TOKEN_TYPE_LABEL,
TOKEN_TYPE_MILESTONE,
TOKEN_TYPE_MY_REACTION,
+ TOKEN_TYPE_TYPE,
TOKEN_TYPE_WEIGHT,
urlSortParams,
} from '~/issues_list/constants';
@@ -42,7 +44,7 @@ import eventHub from '~/issues_list/eventhub';
import { getSortOptions } from '~/issues_list/utils';
import axios from '~/lib/utils/axios_utils';
import { scrollUp } from '~/lib/utils/scroll_utils';
-import { setUrlParams } from '~/lib/utils/url_utility';
+import { joinPaths } from '~/lib/utils/url_utility';
jest.mock('~/flash');
jest.mock('~/lib/utils/scroll_utils', () => ({
@@ -115,11 +117,11 @@ describe('IssuesListApp component', () => {
};
beforeEach(() => {
+ setWindowLocation(TEST_HOST);
axiosMock = new AxiosMockAdapter(axios);
});
afterEach(() => {
- global.jsdom.reconfigure({ url: TEST_HOST });
axiosMock.reset();
wrapper.destroy();
});
@@ -186,7 +188,7 @@ describe('IssuesListApp component', () => {
const search = '?search=refactor&sort=created_date&state=opened';
beforeEach(() => {
- global.jsdom.reconfigure({ url: `${TEST_HOST}${search}` });
+ setWindowLocation(search);
wrapper = mountComponent({
provide: { ...defaultProvide, isSignedIn: true },
@@ -258,7 +260,7 @@ describe('IssuesListApp component', () => {
describe('initial url params', () => {
describe('due_date', () => {
it('is set from the url params', () => {
- global.jsdom.reconfigure({ url: `${TEST_HOST}?${PARAM_DUE_DATE}=${DUE_DATE_OVERDUE}` });
+ setWindowLocation(`?${PARAM_DUE_DATE}=${DUE_DATE_OVERDUE}`);
wrapper = mountComponent();
@@ -268,7 +270,7 @@ describe('IssuesListApp component', () => {
describe('search', () => {
it('is set from the url params', () => {
- global.jsdom.reconfigure({ url: `${TEST_HOST}${locationSearch}` });
+ setWindowLocation(locationSearch);
wrapper = mountComponent();
@@ -278,9 +280,7 @@ describe('IssuesListApp component', () => {
describe('sort', () => {
it.each(Object.keys(urlSortParams))('is set as %s from the url params', (sortKey) => {
- global.jsdom.reconfigure({
- url: setUrlParams({ sort: urlSortParams[sortKey] }, TEST_HOST),
- });
+ setWindowLocation(`?sort=${urlSortParams[sortKey]}`);
wrapper = mountComponent();
@@ -297,7 +297,7 @@ describe('IssuesListApp component', () => {
it('is set from the url params', () => {
const initialState = IssuableStates.All;
- global.jsdom.reconfigure({ url: setUrlParams({ state: initialState }, TEST_HOST) });
+ setWindowLocation(`?state=${initialState}`);
wrapper = mountComponent();
@@ -307,7 +307,7 @@ describe('IssuesListApp component', () => {
describe('filter tokens', () => {
it('is set from the url params', () => {
- global.jsdom.reconfigure({ url: `${TEST_HOST}${locationSearch}` });
+ setWindowLocation(locationSearch);
wrapper = mountComponent();
@@ -347,7 +347,7 @@ describe('IssuesListApp component', () => {
describe('when there are issues', () => {
describe('when search returns no results', () => {
beforeEach(() => {
- global.jsdom.reconfigure({ url: `${TEST_HOST}?search=no+results` });
+ setWindowLocation(`?search=no+results`);
wrapper = mountComponent({ provide: { hasProjectIssues: true }, mountFn: mount });
});
@@ -377,9 +377,7 @@ describe('IssuesListApp component', () => {
describe('when "Closed" tab has no issues', () => {
beforeEach(() => {
- global.jsdom.reconfigure({
- url: setUrlParams({ state: IssuableStates.Closed }, TEST_HOST),
- });
+ setWindowLocation(`?state=${IssuableStates.Closed}`);
wrapper = mountComponent({ provide: { hasProjectIssues: true }, mountFn: mount });
});
@@ -560,6 +558,7 @@ describe('IssuesListApp component', () => {
{ type: TOKEN_TYPE_ASSIGNEE, preloadedAuthors },
{ type: TOKEN_TYPE_MILESTONE },
{ type: TOKEN_TYPE_LABEL },
+ { type: TOKEN_TYPE_TYPE },
{ type: TOKEN_TYPE_MY_REACTION },
{ type: TOKEN_TYPE_CONFIDENTIAL },
{ type: TOKEN_TYPE_ITERATION },
@@ -625,25 +624,25 @@ describe('IssuesListApp component', () => {
const issueOne = {
...defaultQueryResponse.data.project.issues.nodes[0],
id: 'gid://gitlab/Issue/1',
- iid: 101,
+ iid: '101',
title: 'Issue one',
};
const issueTwo = {
...defaultQueryResponse.data.project.issues.nodes[0],
id: 'gid://gitlab/Issue/2',
- iid: 102,
+ iid: '102',
title: 'Issue two',
};
const issueThree = {
...defaultQueryResponse.data.project.issues.nodes[0],
id: 'gid://gitlab/Issue/3',
- iid: 103,
+ iid: '103',
title: 'Issue three',
};
const issueFour = {
...defaultQueryResponse.data.project.issues.nodes[0],
id: 'gid://gitlab/Issue/4',
- iid: 104,
+ iid: '104',
title: 'Issue four',
};
const response = {
@@ -662,9 +661,36 @@ describe('IssuesListApp component', () => {
jest.runOnlyPendingTimers();
});
+ describe('when successful', () => {
+ describe.each`
+ description | issueToMove | oldIndex | newIndex | moveBeforeId | moveAfterId
+ ${'to the beginning of the list'} | ${issueThree} | ${2} | ${0} | ${null} | ${issueOne.id}
+ ${'down the list'} | ${issueOne} | ${0} | ${1} | ${issueTwo.id} | ${issueThree.id}
+ ${'up the list'} | ${issueThree} | ${2} | ${1} | ${issueOne.id} | ${issueTwo.id}
+ ${'to the end of the list'} | ${issueTwo} | ${1} | ${3} | ${issueFour.id} | ${null}
+ `(
+ 'when moving issue $description',
+ ({ issueToMove, oldIndex, newIndex, moveBeforeId, moveAfterId }) => {
+ it('makes API call to reorder the issue', async () => {
+ findIssuableList().vm.$emit('reorder', { oldIndex, newIndex });
+
+ await waitForPromises();
+
+ expect(axiosMock.history.put[0]).toMatchObject({
+ url: joinPaths(defaultProvide.issuesPath, issueToMove.iid, 'reorder'),
+ data: JSON.stringify({
+ move_before_id: getIdFromGraphQLId(moveBeforeId),
+ move_after_id: getIdFromGraphQLId(moveAfterId),
+ }),
+ });
+ });
+ },
+ );
+ });
+
describe('when unsuccessful', () => {
it('displays an error message', async () => {
- axiosMock.onPut(`${defaultProvide.issuesPath}/${issueOne.iid}/reorder`).reply(500);
+ axiosMock.onPut(joinPaths(defaultProvide.issuesPath, issueOne.iid, 'reorder')).reply(500);
findIssuableList().vm.$emit('reorder', { oldIndex: 0, newIndex: 1 });
diff --git a/spec/frontend/issues_list/components/jira_issues_import_status_app_spec.js b/spec/frontend/issues_list/components/jira_issues_import_status_app_spec.js
index 0c96b95a61f..633799816d8 100644
--- a/spec/frontend/issues_list/components/jira_issues_import_status_app_spec.js
+++ b/spec/frontend/issues_list/components/jira_issues_import_status_app_spec.js
@@ -43,10 +43,12 @@ describe('JiraIssuesImportStatus', () => {
wrapper = null;
});
- describe('when Jira import is not in progress', () => {
- it('does not show an alert', () => {
+ describe('when Jira import is neither in progress nor finished', () => {
+ beforeEach(() => {
wrapper = mountComponent();
+ });
+ it('does not show an alert', () => {
expect(wrapper.find(GlAlert).exists()).toBe(false);
});
});
diff --git a/spec/frontend/issues_list/mock_data.js b/spec/frontend/issues_list/mock_data.js
index fd59241fd1d..d3f3f2f9f23 100644
--- a/spec/frontend/issues_list/mock_data.js
+++ b/spec/frontend/issues_list/mock_data.js
@@ -23,6 +23,7 @@ export const getIssuesQueryResponse = {
downvotes: 2,
dueDate: '2021-05-29',
humanTimeEstimate: null,
+ mergeRequestsCount: false,
moved: false,
title: 'Issue title',
updatedAt: '2021-05-22T04:08:01Z',
@@ -106,8 +107,11 @@ export const locationSearch = [
export const locationSearchWithSpecialValues = [
'assignee_id=123',
'assignee_username=bart',
+ 'type[]=issue',
+ 'type[]=incident',
'my_reaction_emoji=None',
'iteration_id=Current',
+ 'milestone_title=Upcoming',
'epic_id=None',
'weight=None',
].join('&');
@@ -140,8 +144,11 @@ export const filteredTokens = [
export const filteredTokensWithSpecialValues = [
{ type: 'assignee_username', value: { data: '123', operator: OPERATOR_IS } },
{ type: 'assignee_username', value: { data: 'bart', operator: OPERATOR_IS } },
+ { type: 'type', value: { data: 'issue', operator: OPERATOR_IS } },
+ { type: 'type', value: { data: 'incident', operator: OPERATOR_IS } },
{ type: 'my_reaction_emoji', value: { data: 'None', operator: OPERATOR_IS } },
{ type: 'iteration', value: { data: 'Current', operator: OPERATOR_IS } },
+ { type: 'milestone', value: { data: 'Upcoming', operator: OPERATOR_IS } },
{ type: 'epic_id', value: { data: 'None', operator: OPERATOR_IS } },
{ type: 'weight', value: { data: 'None', operator: OPERATOR_IS } },
];
@@ -170,8 +177,10 @@ export const apiParams = {
export const apiParamsWithSpecialValues = {
assigneeId: '123',
assigneeUsernames: 'bart',
+ types: ['ISSUE', 'INCIDENT'],
myReactionEmoji: 'None',
iterationWildcardId: 'CURRENT',
+ milestoneWildcardId: 'UPCOMING',
epicId: 'None',
weight: 'None',
};
@@ -198,8 +207,10 @@ export const urlParams = {
export const urlParamsWithSpecialValues = {
assignee_id: '123',
'assignee_username[]': 'bart',
+ 'type[]': ['issue', 'incident'],
my_reaction_emoji: 'None',
iteration_id: 'Current',
+ milestone_title: 'Upcoming',
epic_id: 'None',
weight: 'None',
};
diff --git a/spec/frontend/issues_list/utils_spec.js b/spec/frontend/issues_list/utils_spec.js
index b7863068570..458776d9ec5 100644
--- a/spec/frontend/issues_list/utils_spec.js
+++ b/spec/frontend/issues_list/utils_spec.js
@@ -8,17 +8,36 @@ import {
urlParams,
urlParamsWithSpecialValues,
} from 'jest/issues_list/mock_data';
-import { DUE_DATE_VALUES, urlSortParams } from '~/issues_list/constants';
+import {
+ defaultPageSizeParams,
+ DUE_DATE_VALUES,
+ largePageSizeParams,
+ RELATIVE_POSITION_ASC,
+ urlSortParams,
+} from '~/issues_list/constants';
import {
convertToApiParams,
convertToSearchQuery,
convertToUrlParams,
getDueDateValue,
getFilterTokens,
+ getInitialPageParams,
getSortKey,
getSortOptions,
} from '~/issues_list/utils';
+describe('getInitialPageParams', () => {
+ it.each(Object.keys(urlSortParams))(
+ 'returns the correct page params for sort key %s',
+ (sortKey) => {
+ const expectedPageParams =
+ sortKey === RELATIVE_POSITION_ASC ? largePageSizeParams : defaultPageSizeParams;
+
+ expect(getInitialPageParams(sortKey)).toBe(expectedPageParams);
+ },
+ );
+});
+
describe('getSortKey', () => {
it.each(Object.keys(urlSortParams))('returns %s given the correct inputs', (sortKey) => {
const sort = urlSortParams[sortKey];
diff --git a/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js b/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js
new file mode 100644
index 00000000000..7326b84ad54
--- /dev/null
+++ b/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js
@@ -0,0 +1,236 @@
+import { GlAlert, GlForm, GlFormInput, GlButton } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import NewBranchForm from '~/jira_connect/branches/components/new_branch_form.vue';
+import ProjectDropdown from '~/jira_connect/branches/components/project_dropdown.vue';
+import SourceBranchDropdown from '~/jira_connect/branches/components/source_branch_dropdown.vue';
+import {
+ CREATE_BRANCH_ERROR_GENERIC,
+ CREATE_BRANCH_ERROR_WITH_CONTEXT,
+} from '~/jira_connect/branches/constants';
+import createBranchMutation from '~/jira_connect/branches/graphql/mutations/create_branch.mutation.graphql';
+
+const mockProject = {
+ id: 'test',
+ fullPath: 'test-path',
+ repository: {
+ branchNames: ['main', 'f-test', 'release'],
+ rootRef: 'main',
+ },
+};
+const mockCreateBranchMutationResponse = {
+ data: {
+ createBranch: {
+ clientMutationId: 1,
+ errors: [],
+ },
+ },
+};
+const mockCreateBranchMutationResponseWithErrors = {
+ data: {
+ createBranch: {
+ clientMutationId: 1,
+ errors: ['everything is broken, sorry.'],
+ },
+ },
+};
+const mockCreateBranchMutationSuccess = jest
+ .fn()
+ .mockResolvedValue(mockCreateBranchMutationResponse);
+const mockCreateBranchMutationWithErrors = jest
+ .fn()
+ .mockResolvedValue(mockCreateBranchMutationResponseWithErrors);
+const mockCreateBranchMutationFailed = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+const mockMutationLoading = jest.fn().mockReturnValue(new Promise(() => {}));
+
+const localVue = createLocalVue();
+
+describe('NewBranchForm', () => {
+ let wrapper;
+
+ const findSourceBranchDropdown = () => wrapper.findComponent(SourceBranchDropdown);
+ const findProjectDropdown = () => wrapper.findComponent(ProjectDropdown);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findInput = () => wrapper.findComponent(GlFormInput);
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ const completeForm = async () => {
+ await findInput().vm.$emit('input', 'cool-branch-name');
+ await findProjectDropdown().vm.$emit('change', mockProject);
+ await findSourceBranchDropdown().vm.$emit('change', 'source-branch');
+ };
+
+ function createMockApolloProvider({
+ mockCreateBranchMutation = mockCreateBranchMutationSuccess,
+ } = {}) {
+ localVue.use(VueApollo);
+
+ const mockApollo = createMockApollo([[createBranchMutation, mockCreateBranchMutation]]);
+
+ return mockApollo;
+ }
+
+ function createComponent({ mockApollo, provide } = {}) {
+ wrapper = shallowMount(NewBranchForm, {
+ localVue,
+ apolloProvider: mockApollo || createMockApolloProvider(),
+ provide: {
+ initialBranchName: '',
+ ...provide,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when selecting items from dropdowns', () => {
+ describe('when a project is selected', () => {
+ it('sets the `selectedProject` prop for ProjectDropdown and SourceBranchDropdown', async () => {
+ createComponent();
+
+ const projectDropdown = findProjectDropdown();
+ await projectDropdown.vm.$emit('change', mockProject);
+
+ expect(projectDropdown.props('selectedProject')).toEqual(mockProject);
+ expect(findSourceBranchDropdown().props('selectedProject')).toEqual(mockProject);
+ });
+ });
+
+ describe('when a source branch is selected', () => {
+ it('sets the `selectedBranchName` prop for SourceBranchDropdown', async () => {
+ createComponent();
+
+ const mockBranchName = 'main';
+ const sourceBranchDropdown = findSourceBranchDropdown();
+ await sourceBranchDropdown.vm.$emit('change', mockBranchName);
+
+ expect(sourceBranchDropdown.props('selectedBranchName')).toBe(mockBranchName);
+ });
+ });
+ });
+
+ describe('when submitting form', () => {
+ describe('when form submission is loading', () => {
+ it('sets submit button `loading` prop to `true`', async () => {
+ createComponent({
+ mockApollo: createMockApolloProvider({
+ mockCreateBranchMutation: mockMutationLoading,
+ }),
+ });
+
+ await completeForm();
+
+ await findForm().vm.$emit('submit', new Event('submit'));
+ await waitForPromises();
+
+ expect(findButton().props('loading')).toBe(true);
+ });
+ });
+
+ describe('when form submission is successful', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await completeForm();
+
+ await findForm().vm.$emit('submit', new Event('submit'));
+ await waitForPromises();
+ });
+
+ it('emits `success` event', () => {
+ expect(wrapper.emitted('success')).toBeTruthy();
+ });
+
+    it('calls `createBranch` mutation correctly', () => {
+ expect(mockCreateBranchMutationSuccess).toHaveBeenCalledWith({
+ name: 'cool-branch-name',
+ projectPath: mockProject.fullPath,
+ ref: 'source-branch',
+ });
+ });
+
+ it('sets submit button `loading` prop to `false`', () => {
+ expect(findButton().props('loading')).toBe(false);
+ });
+ });
+
+ describe('when form submission fails', () => {
+ describe.each`
+ scenario | mutation | alertTitle | alertText
+ ${'with errors-as-data'} | ${mockCreateBranchMutationWithErrors} | ${CREATE_BRANCH_ERROR_WITH_CONTEXT} | ${mockCreateBranchMutationResponseWithErrors.data.createBranch.errors[0]}
+ ${'top-level error'} | ${mockCreateBranchMutationFailed} | ${''} | ${CREATE_BRANCH_ERROR_GENERIC}
+ `('', ({ mutation, alertTitle, alertText }) => {
+ beforeEach(async () => {
+ createComponent({
+ mockApollo: createMockApolloProvider({
+ mockCreateBranchMutation: mutation,
+ }),
+ });
+
+ await completeForm();
+
+ await findForm().vm.$emit('submit', new Event('submit'));
+ await waitForPromises();
+ });
+
+ it('displays an alert', () => {
+ const alert = findAlert();
+ expect(alert.exists()).toBe(true);
+ expect(alert.text()).toBe(alertText);
+ expect(alert.props()).toMatchObject({ title: alertTitle, variant: 'danger' });
+ });
+
+ it('sets submit button `loading` prop to `false`', () => {
+ expect(findButton().props('loading')).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('when `initialBranchName` is specified', () => {
+ it('sets value of branch name input to `initialBranchName` by default', () => {
+ const mockInitialBranchName = 'ap1-test-branch-name';
+
+ createComponent({ provide: { initialBranchName: mockInitialBranchName } });
+ expect(findInput().attributes('value')).toBe(mockInitialBranchName);
+ });
+ });
+
+ describe('error handling', () => {
+ describe.each`
+ component | componentName
+ ${SourceBranchDropdown} | ${'SourceBranchDropdown'}
+ ${ProjectDropdown} | ${'ProjectDropdown'}
+ `('when $componentName emits error', ({ component }) => {
+ const mockErrorMessage = 'oh noes!';
+
+ beforeEach(async () => {
+ createComponent();
+ await wrapper.findComponent(component).vm.$emit('error', { message: mockErrorMessage });
+ });
+
+ it('displays an alert', () => {
+ const alert = findAlert();
+ expect(alert.exists()).toBe(true);
+ expect(alert.text()).toBe(mockErrorMessage);
+ expect(alert.props('variant')).toBe('danger');
+ });
+
+ describe('when alert is dismissed', () => {
+ it('hides alert', async () => {
+ const alert = findAlert();
+ expect(alert.exists()).toBe(true);
+
+ await alert.vm.$emit('dismiss');
+
+ expect(alert.exists()).toBe(false);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/jira_connect/branches/pages/index_spec.js b/spec/frontend/jira_connect/branches/pages/index_spec.js
new file mode 100644
index 00000000000..92976dd28da
--- /dev/null
+++ b/spec/frontend/jira_connect/branches/pages/index_spec.js
@@ -0,0 +1,65 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import NewBranchForm from '~/jira_connect/branches/components/new_branch_form.vue';
+import {
+ I18N_PAGE_TITLE_WITH_BRANCH_NAME,
+ I18N_PAGE_TITLE_DEFAULT,
+} from '~/jira_connect/branches/constants';
+import JiraConnectNewBranchPage from '~/jira_connect/branches/pages/index.vue';
+import { sprintf } from '~/locale';
+
+describe('NewBranchForm', () => {
+ let wrapper;
+
+ const findPageTitle = () => wrapper.find('h1');
+ const findNewBranchForm = () => wrapper.findComponent(NewBranchForm);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+
+ function createComponent({ provide } = {}) {
+ wrapper = shallowMount(JiraConnectNewBranchPage, {
+ provide: {
+ initialBranchName: '',
+ successStateSvgPath: '',
+ ...provide,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('page title', () => {
+ it.each`
+ initialBranchName | pageTitle
+ ${undefined} | ${I18N_PAGE_TITLE_DEFAULT}
+ ${'ap1-test-button'} | ${sprintf(I18N_PAGE_TITLE_WITH_BRANCH_NAME, { jiraIssue: 'ap1-test-button' })}
+ `(
+ 'sets page title to "$pageTitle" when initial branch name is "$initialBranchName"',
+ ({ initialBranchName, pageTitle }) => {
+ createComponent({ provide: { initialBranchName } });
+
+ expect(findPageTitle().text()).toBe(pageTitle);
+ },
+ );
+ });
+
+ it('renders NewBranchForm by default', () => {
+ createComponent();
+
+ expect(findNewBranchForm().exists()).toBe(true);
+ expect(findEmptyState().exists()).toBe(false);
+ });
+
+  describe('when `success` event emitted from NewBranchForm', () => {
+ it('renders the success state', async () => {
+ createComponent();
+
+ const newBranchForm = findNewBranchForm();
+ await newBranchForm.vm.$emit('success');
+
+ expect(findNewBranchForm().exists()).toBe(false);
+ expect(findEmptyState().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/jira_connect/api_spec.js b/spec/frontend/jira_connect/subscriptions/api_spec.js
index 88922999715..57b11bdbc27 100644
--- a/spec/frontend/jira_connect/api_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/api_spec.js
@@ -1,10 +1,10 @@
import MockAdapter from 'axios-mock-adapter';
-import { addSubscription, removeSubscription, fetchGroups } from '~/jira_connect/api';
-import { getJwt } from '~/jira_connect/utils';
+import { addSubscription, removeSubscription, fetchGroups } from '~/jira_connect/subscriptions/api';
+import { getJwt } from '~/jira_connect/subscriptions/utils';
import axios from '~/lib/utils/axios_utils';
import httpStatus from '~/lib/utils/http_status';
-jest.mock('~/jira_connect/utils', () => ({
+jest.mock('~/jira_connect/subscriptions/utils', () => ({
getJwt: jest.fn().mockResolvedValue('jwt'),
}));
diff --git a/spec/frontend/jira_connect/components/__snapshots__/group_item_name_spec.js.snap b/spec/frontend/jira_connect/subscriptions/components/__snapshots__/group_item_name_spec.js.snap
index 21c903f064d..21c903f064d 100644
--- a/spec/frontend/jira_connect/components/__snapshots__/group_item_name_spec.js.snap
+++ b/spec/frontend/jira_connect/subscriptions/components/__snapshots__/group_item_name_spec.js.snap
diff --git a/spec/frontend/jira_connect/components/app_spec.js b/spec/frontend/jira_connect/subscriptions/components/app_spec.js
index e0d61d8209b..8915a7697a5 100644
--- a/spec/frontend/jira_connect/components/app_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/app_spec.js
@@ -1,12 +1,12 @@
import { GlAlert, GlButton, GlModal, GlLink } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
-import JiraConnectApp from '~/jira_connect/components/app.vue';
-import createStore from '~/jira_connect/store';
-import { SET_ALERT } from '~/jira_connect/store/mutation_types';
+import JiraConnectApp from '~/jira_connect/subscriptions/components/app.vue';
+import createStore from '~/jira_connect/subscriptions/store';
+import { SET_ALERT } from '~/jira_connect/subscriptions/store/mutation_types';
import { __ } from '~/locale';
-jest.mock('~/jira_connect/utils', () => ({
+jest.mock('~/jira_connect/subscriptions/utils', () => ({
retrieveAlert: jest.fn().mockReturnValue({ message: 'error message' }),
getLocation: jest.fn(),
}));
diff --git a/spec/frontend/jira_connect/components/group_item_name_spec.js b/spec/frontend/jira_connect/subscriptions/components/group_item_name_spec.js
index ea0067f8ed1..b5fe08486b1 100644
--- a/spec/frontend/jira_connect/components/group_item_name_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/group_item_name_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import GroupItemName from '~/jira_connect/components/group_item_name.vue';
+import GroupItemName from '~/jira_connect/subscriptions/components/group_item_name.vue';
import { mockGroup1 } from '../mock_data';
describe('GroupItemName', () => {
diff --git a/spec/frontend/jira_connect/components/groups_list_item_spec.js b/spec/frontend/jira_connect/subscriptions/components/groups_list_item_spec.js
index bcc27cc2898..b69435df83a 100644
--- a/spec/frontend/jira_connect/components/groups_list_item_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/groups_list_item_spec.js
@@ -2,13 +2,13 @@ import { GlButton } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
-import * as JiraConnectApi from '~/jira_connect/api';
-import GroupItemName from '~/jira_connect/components/group_item_name.vue';
-import GroupsListItem from '~/jira_connect/components/groups_list_item.vue';
-import { persistAlert, reloadPage } from '~/jira_connect/utils';
+import * as JiraConnectApi from '~/jira_connect/subscriptions/api';
+import GroupItemName from '~/jira_connect/subscriptions/components/group_item_name.vue';
+import GroupsListItem from '~/jira_connect/subscriptions/components/groups_list_item.vue';
+import { persistAlert, reloadPage } from '~/jira_connect/subscriptions/utils';
import { mockGroup1 } from '../mock_data';
-jest.mock('~/jira_connect/utils');
+jest.mock('~/jira_connect/subscriptions/utils');
describe('GroupsListItem', () => {
let wrapper;
diff --git a/spec/frontend/jira_connect/components/groups_list_spec.js b/spec/frontend/jira_connect/subscriptions/components/groups_list_spec.js
index d583fb68771..d3a9a3bfd41 100644
--- a/spec/frontend/jira_connect/components/groups_list_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/groups_list_spec.js
@@ -2,10 +2,10 @@ import { GlAlert, GlLoadingIcon, GlSearchBoxByType, GlPagination } from '@gitlab
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import { fetchGroups } from '~/jira_connect/api';
-import GroupsList from '~/jira_connect/components/groups_list.vue';
-import GroupsListItem from '~/jira_connect/components/groups_list_item.vue';
-import { DEFAULT_GROUPS_PER_PAGE } from '~/jira_connect/constants';
+import { fetchGroups } from '~/jira_connect/subscriptions/api';
+import GroupsList from '~/jira_connect/subscriptions/components/groups_list.vue';
+import GroupsListItem from '~/jira_connect/subscriptions/components/groups_list_item.vue';
+import { DEFAULT_GROUPS_PER_PAGE } from '~/jira_connect/subscriptions/constants';
import { mockGroup1, mockGroup2 } from '../mock_data';
const createMockGroup = (groupId) => {
@@ -19,7 +19,7 @@ const createMockGroups = (count) => {
return [...new Array(count)].map((_, idx) => createMockGroup(idx));
};
-jest.mock('~/jira_connect/api', () => {
+jest.mock('~/jira_connect/subscriptions/api', () => {
return {
fetchGroups: jest.fn(),
};
diff --git a/spec/frontend/jira_connect/components/subscriptions_list_spec.js b/spec/frontend/jira_connect/subscriptions/components/subscriptions_list_spec.js
index ff86969367d..32b43765843 100644
--- a/spec/frontend/jira_connect/components/subscriptions_list_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/subscriptions_list_spec.js
@@ -2,14 +2,14 @@ import { GlButton, GlEmptyState, GlTable } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
-import * as JiraConnectApi from '~/jira_connect/api';
-import SubscriptionsList from '~/jira_connect/components/subscriptions_list.vue';
-import createStore from '~/jira_connect/store';
-import { SET_ALERT } from '~/jira_connect/store/mutation_types';
-import { reloadPage } from '~/jira_connect/utils';
+import * as JiraConnectApi from '~/jira_connect/subscriptions/api';
+import SubscriptionsList from '~/jira_connect/subscriptions/components/subscriptions_list.vue';
+import createStore from '~/jira_connect/subscriptions/store';
+import { SET_ALERT } from '~/jira_connect/subscriptions/store/mutation_types';
+import { reloadPage } from '~/jira_connect/subscriptions/utils';
import { mockSubscription } from '../mock_data';
-jest.mock('~/jira_connect/utils');
+jest.mock('~/jira_connect/subscriptions/utils');
describe('SubscriptionsList', () => {
let wrapper;
diff --git a/spec/frontend/jira_connect/index_spec.js b/spec/frontend/jira_connect/subscriptions/index_spec.js
index 0161cfa0273..786f3b4a7d3 100644
--- a/spec/frontend/jira_connect/index_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/index_spec.js
@@ -1,6 +1,6 @@
-import { initJiraConnect } from '~/jira_connect';
+import { initJiraConnect } from '~/jira_connect/subscriptions';
-jest.mock('~/jira_connect/utils', () => ({
+jest.mock('~/jira_connect/subscriptions/utils', () => ({
getLocation: jest.fn().mockResolvedValue('test/location'),
}));
diff --git a/spec/frontend/jira_connect/mock_data.js b/spec/frontend/jira_connect/subscriptions/mock_data.js
index 5247a3dc522..5247a3dc522 100644
--- a/spec/frontend/jira_connect/mock_data.js
+++ b/spec/frontend/jira_connect/subscriptions/mock_data.js
diff --git a/spec/frontend/jira_connect/store/mutations_spec.js b/spec/frontend/jira_connect/subscriptions/store/mutations_spec.js
index 584b17b36f7..84a33dbf0b5 100644
--- a/spec/frontend/jira_connect/store/mutations_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/store/mutations_spec.js
@@ -1,5 +1,5 @@
-import mutations from '~/jira_connect/store/mutations';
-import state from '~/jira_connect/store/state';
+import mutations from '~/jira_connect/subscriptions/store/mutations';
+import state from '~/jira_connect/subscriptions/store/state';
describe('JiraConnect store mutations', () => {
let localState;
diff --git a/spec/frontend/jira_connect/utils_spec.js b/spec/frontend/jira_connect/subscriptions/utils_spec.js
index 7eae870478d..2dd95de1b8c 100644
--- a/spec/frontend/jira_connect/utils_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/utils_spec.js
@@ -1,6 +1,6 @@
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
-import { ALERT_LOCALSTORAGE_KEY } from '~/jira_connect/constants';
+import { ALERT_LOCALSTORAGE_KEY } from '~/jira_connect/subscriptions/constants';
import {
persistAlert,
retrieveAlert,
@@ -8,7 +8,7 @@ import {
getLocation,
reloadPage,
sizeToParent,
-} from '~/jira_connect/utils';
+} from '~/jira_connect/subscriptions/utils';
describe('JiraConnect utils', () => {
describe('alert utils', () => {
diff --git a/spec/frontend/jobs/components/log/mock_data.js b/spec/frontend/jobs/components/log/mock_data.js
index 76c35703106..3ff0bd73581 100644
--- a/spec/frontend/jobs/components/log/mock_data.js
+++ b/spec/frontend/jobs/components/log/mock_data.js
@@ -123,6 +123,15 @@ export const multipleCollapsibleSectionsMockData = [
},
];
+export const backwardsCompatibilityTrace = [
+ {
+ offset: 2365,
+ content: [],
+ section: 'download-artifacts',
+ section_duration: '00:01',
+ },
+];
+
export const originalTrace = [
{
offset: 1,
diff --git a/spec/frontend/jobs/components/stages_dropdown_spec.js b/spec/frontend/jobs/components/stages_dropdown_spec.js
index b75d1707a8d..b0e95a2d5b6 100644
--- a/spec/frontend/jobs/components/stages_dropdown_spec.js
+++ b/spec/frontend/jobs/components/stages_dropdown_spec.js
@@ -20,6 +20,7 @@ describe('Stages Dropdown', () => {
const findPipelineInfoText = () => wrapper.findByTestId('pipeline-info').text();
const findPipelinePath = () => wrapper.findByTestId('pipeline-path').attributes('href');
const findMRLinkPath = () => wrapper.findByTestId('mr-link').attributes('href');
+ const findCopySourceBranchBtn = () => wrapper.findByTestId('copy-source-ref-link');
const findSourceBranchLinkPath = () =>
wrapper.findByTestId('source-branch-link').attributes('href');
const findTargetBranchLinkPath = () =>
@@ -70,6 +71,10 @@ describe('Stages Dropdown', () => {
expect(actual).toBe(expected);
});
+
+ it(`renders the source ref copy button`, () => {
+ expect(findCopySourceBranchBtn().exists()).toBe(true);
+ });
});
describe('with an "attached" merge request pipeline', () => {
@@ -103,6 +108,10 @@ describe('Stages Dropdown', () => {
mockPipelineWithAttachedMR.merge_request.target_branch_path,
);
});
+
+ it(`renders the source ref copy button`, () => {
+ expect(findCopySourceBranchBtn().exists()).toBe(true);
+ });
});
describe('with a detached merge request pipeline', () => {
@@ -130,5 +139,9 @@ describe('Stages Dropdown', () => {
mockPipelineDetached.merge_request.source_branch_path,
);
});
+
+ it(`renders the source ref copy button`, () => {
+ expect(findCopySourceBranchBtn().exists()).toBe(true);
+ });
});
});
diff --git a/spec/frontend/jobs/store/utils_spec.js b/spec/frontend/jobs/store/utils_spec.js
index 35ac2945ab5..0c5fa150002 100644
--- a/spec/frontend/jobs/store/utils_spec.js
+++ b/spec/frontend/jobs/store/utils_spec.js
@@ -19,6 +19,7 @@ import {
collapsibleTrace,
collapsibleTraceIncremental,
multipleCollapsibleSectionsMockData,
+ backwardsCompatibilityTrace,
} from '../components/log/mock_data';
describe('Jobs Store Utils', () => {
@@ -297,6 +298,21 @@ describe('Jobs Store Utils', () => {
expect(result.parsedLines[1].lines).toEqual(expect.arrayContaining(innerSection));
});
});
+
+ describe('backwards compatibility', () => {
+ beforeEach(() => {
+ result = logLinesParser(backwardsCompatibilityTrace);
+ });
+
+ it('should return an object with a parsedLines prop', () => {
+ expect(result).toEqual(
+ expect.objectContaining({
+ parsedLines: expect.any(Array),
+ }),
+ );
+ expect(result.parsedLines).toHaveLength(1);
+ });
+ });
});
describe('findOffsetAndRemove', () => {
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 66d0faa95e7..c8ac7ffc9d9 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -1,3 +1,4 @@
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import * as urlUtils from '~/lib/utils/url_utility';
@@ -16,24 +17,11 @@ const shas = {
],
};
-const setWindowLocation = (value) => {
- Object.defineProperty(window, 'location', {
- writable: true,
- value,
- });
-};
+beforeEach(() => {
+ setWindowLocation(TEST_HOST);
+});
describe('URL utility', () => {
- let originalLocation;
-
- beforeAll(() => {
- originalLocation = window.location;
- });
-
- afterAll(() => {
- window.location = originalLocation;
- });
-
describe('webIDEUrl', () => {
afterEach(() => {
gon.relative_url_root = '';
@@ -68,14 +56,7 @@ describe('URL utility', () => {
describe('getParameterValues', () => {
beforeEach(() => {
- setWindowLocation({
- href: 'https://gitlab.com?test=passing&multiple=1&multiple=2',
- // make our fake location act like real window.location.toString
- // URL() (used in getParameterValues) does this if passed an object
- toString() {
- return this.href;
- },
- });
+ setWindowLocation('https://gitlab.com?test=passing&multiple=1&multiple=2');
});
it('returns empty array for no params', () => {
@@ -330,9 +311,7 @@ describe('URL utility', () => {
describe('doesHashExistInUrl', () => {
beforeEach(() => {
- setWindowLocation({
- hash: 'https://gitlab.com/gitlab-org/gitlab-test/issues/1#note_1',
- });
+ setWindowLocation('#note_1');
});
it('should return true when the given string exists in the URL hash', () => {
@@ -442,10 +421,7 @@ describe('URL utility', () => {
describe('getBaseURL', () => {
beforeEach(() => {
- setWindowLocation({
- protocol: 'https:',
- host: 'gitlab.com',
- });
+ setWindowLocation('https://gitlab.com');
});
it('returns correct base URL', () => {
@@ -637,10 +613,7 @@ describe('URL utility', () => {
${'http:'} | ${'ws:'}
${'https:'} | ${'wss:'}
`('returns "$expectation" with "$protocol" protocol', ({ protocol, expectation }) => {
- setWindowLocation({
- protocol,
- host: 'example.com',
- });
+ setWindowLocation(`${protocol}//example.com`);
expect(urlUtils.getWebSocketProtocol()).toEqual(expectation);
});
@@ -648,10 +621,7 @@ describe('URL utility', () => {
describe('getWebSocketUrl', () => {
it('joins location host to path', () => {
- setWindowLocation({
- protocol: 'http:',
- host: 'example.com',
- });
+ setWindowLocation('http://example.com');
const path = '/lorem/ipsum?a=bc';
@@ -700,21 +670,23 @@ describe('URL utility', () => {
describe('queryToObject', () => {
it.each`
- case | query | options | result
- ${'converts query'} | ${'?one=1&two=2'} | ${undefined} | ${{ one: '1', two: '2' }}
- ${'converts query without ?'} | ${'one=1&two=2'} | ${undefined} | ${{ one: '1', two: '2' }}
- ${'removes undefined values'} | ${'?one=1&two=2&three'} | ${undefined} | ${{ one: '1', two: '2' }}
- ${'overwrites values with same key and does not change key'} | ${'?one[]=1&one[]=2&two=2&two=3'} | ${undefined} | ${{ 'one[]': '2', two: '3' }}
- ${'gathers values with the same array-key, strips `[]` from key'} | ${'?one[]=1&one[]=2&two=2&two=3'} | ${{ gatherArrays: true }} | ${{ one: ['1', '2'], two: '3' }}
- ${'overwrites values with the same array-key name'} | ${'?one=1&one[]=2&two=2&two=3'} | ${{ gatherArrays: true }} | ${{ one: ['2'], two: '3' }}
- ${'overwrites values with the same key name'} | ${'?one[]=1&one=2&two=2&two=3'} | ${{ gatherArrays: true }} | ${{ one: '2', two: '3' }}
- ${'ignores plus symbols'} | ${'?search=a+b'} | ${{ legacySpacesDecode: true }} | ${{ search: 'a+b' }}
- ${'ignores plus symbols in keys'} | ${'?search+term=a'} | ${{ legacySpacesDecode: true }} | ${{ 'search+term': 'a' }}
- ${'ignores plus symbols when gathering arrays'} | ${'?search[]=a+b'} | ${{ gatherArrays: true, legacySpacesDecode: true }} | ${{ search: ['a+b'] }}
- ${'replaces plus symbols with spaces'} | ${'?search=a+b'} | ${undefined} | ${{ search: 'a b' }}
- ${'replaces plus symbols in keys with spaces'} | ${'?search+term=a'} | ${undefined} | ${{ 'search term': 'a' }}
- ${'replaces plus symbols when gathering arrays'} | ${'?search[]=a+b'} | ${{ gatherArrays: true }} | ${{ search: ['a b'] }}
- ${'replaces plus symbols when gathering arrays for values with same key'} | ${'?search[]=a+b&search[]=c+d'} | ${{ gatherArrays: true }} | ${{ search: ['a b', 'c d'] }}
+ case | query | options | result
+ ${'converts query'} | ${'?one=1&two=2'} | ${undefined} | ${{ one: '1', two: '2' }}
+ ${'converts query without ?'} | ${'one=1&two=2'} | ${undefined} | ${{ one: '1', two: '2' }}
+ ${'removes undefined values'} | ${'?one=1&two=2&three'} | ${undefined} | ${{ one: '1', two: '2' }}
+ ${'overwrites values with same key and does not change key'} | ${'?one[]=1&one[]=2&two=2&two=3'} | ${undefined} | ${{ 'one[]': '2', two: '3' }}
+ ${'gathers values with the same array-key, strips `[]` from key'} | ${'?one[]=1&one[]=2&two=2&two=3'} | ${{ gatherArrays: true }} | ${{ one: ['1', '2'], two: '3' }}
+ ${'overwrites values with the same array-key name'} | ${'?one=1&one[]=2&two=2&two=3'} | ${{ gatherArrays: true }} | ${{ one: ['2'], two: '3' }}
+ ${'overwrites values with the same key name'} | ${'?one[]=1&one=2&two=2&two=3'} | ${{ gatherArrays: true }} | ${{ one: '2', two: '3' }}
+ ${'ignores plus symbols'} | ${'?search=a+b'} | ${{ legacySpacesDecode: true }} | ${{ search: 'a+b' }}
+ ${'ignores plus symbols in keys'} | ${'?search+term=a'} | ${{ legacySpacesDecode: true }} | ${{ 'search+term': 'a' }}
+ ${'ignores plus symbols when gathering arrays'} | ${'?search[]=a+b'} | ${{ gatherArrays: true, legacySpacesDecode: true }} | ${{ search: ['a+b'] }}
+ ${'replaces plus symbols with spaces'} | ${'?search=a+b'} | ${undefined} | ${{ search: 'a b' }}
+ ${'replaces plus symbols in keys with spaces'} | ${'?search+term=a'} | ${undefined} | ${{ 'search term': 'a' }}
+ ${'preserves square brackets in array params'} | ${'?search[]=a&search[]=b'} | ${{ gatherArrays: true }} | ${{ search: ['a', 'b'] }}
+ ${'decodes encoded square brackets in array params'} | ${'?search%5B%5D=a&search%5B%5D=b'} | ${{ gatherArrays: true }} | ${{ search: ['a', 'b'] }}
+ ${'replaces plus symbols when gathering arrays'} | ${'?search[]=a+b'} | ${{ gatherArrays: true }} | ${{ search: ['a b'] }}
+ ${'replaces plus symbols when gathering arrays for values with same key'} | ${'?search[]=a+b&search[]=c+d'} | ${{ gatherArrays: true }} | ${{ search: ['a b', 'c d'] }}
`('$case', ({ query, options, result }) => {
expect(urlUtils.queryToObject(query, options)).toEqual(result);
});
@@ -724,32 +696,32 @@ describe('URL utility', () => {
const { getParameterByName } = urlUtils;
it('should return valid parameter', () => {
- setWindowLocation({ search: '?scope=all&p=2' });
+ setWindowLocation('?scope=all&p=2');
expect(getParameterByName('p')).toEqual('2');
expect(getParameterByName('scope')).toBe('all');
});
it('should return invalid parameter', () => {
- setWindowLocation({ search: '?scope=all&p=2' });
+ setWindowLocation('?scope=all&p=2');
expect(getParameterByName('fakeParameter')).toBe(null);
});
it('should return a parameter with spaces', () => {
- setWindowLocation({ search: '?search=my terms' });
+ setWindowLocation('?search=my terms');
expect(getParameterByName('search')).toBe('my terms');
});
it('should return a parameter with encoded spaces', () => {
- setWindowLocation({ search: '?search=my%20terms' });
+ setWindowLocation('?search=my%20terms');
expect(getParameterByName('search')).toBe('my terms');
});
it('should return a parameter with plus signs as spaces', () => {
- setWindowLocation({ search: '?search=my+terms' });
+ setWindowLocation('?search=my+terms');
expect(getParameterByName('search')).toBe('my terms');
});
@@ -842,18 +814,20 @@ describe('URL utility', () => {
});
describe('urlIsDifferent', () => {
+ const current = 'http://current.test/';
+
beforeEach(() => {
- setWindowLocation('current');
+ setWindowLocation(current);
});
it('should compare against the window location if no compare value is provided', () => {
expect(urlUtils.urlIsDifferent('different')).toBeTruthy();
- expect(urlUtils.urlIsDifferent('current')).toBeFalsy();
+ expect(urlUtils.urlIsDifferent(current)).toBeFalsy();
});
it('should use the provided compare value', () => {
- expect(urlUtils.urlIsDifferent('different', 'current')).toBeTruthy();
- expect(urlUtils.urlIsDifferent('current', 'current')).toBeFalsy();
+ expect(urlUtils.urlIsDifferent('different', current)).toBeTruthy();
+ expect(urlUtils.urlIsDifferent(current, current)).toBeFalsy();
});
});
@@ -944,9 +918,8 @@ describe('URL utility', () => {
it.each([[httpProtocol], [httpsProtocol]])(
'when no url passed, returns correct protocol for %i from window location',
(protocol) => {
- setWindowLocation({
- protocol,
- });
+ setWindowLocation(`${protocol}//test.host`);
+
expect(urlUtils.getHTTPProtocol()).toBe(protocol.slice(0, -1));
},
);
@@ -979,10 +952,8 @@ describe('URL utility', () => {
describe('getURLOrigin', () => {
it('when no url passed, returns correct origin from window location', () => {
- const origin = 'https://foo.bar';
-
- setWindowLocation({ origin });
- expect(urlUtils.getURLOrigin()).toBe(origin);
+ setWindowLocation('https://user:pass@origin.test:1234/foo/bar?foo=1#bar');
+ expect(urlUtils.getURLOrigin()).toBe('https://origin.test:1234');
});
it.each`
@@ -1032,10 +1003,6 @@ describe('URL utility', () => {
// eslint-disable-next-line no-script-url
const javascriptUrl = 'javascript:alert(1)';
- beforeEach(() => {
- setWindowLocation({ origin: TEST_HOST });
- });
-
it.each`
url | expected
${TEST_HOST} | ${true}
diff --git a/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
index e7a99a96da6..79252456f67 100644
--- a/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
@@ -37,7 +37,7 @@ describe('InviteActionButtons', () => {
});
it('sets props correctly', () => {
- expect(findRemoveMemberButton().props()).toEqual({
+ expect(findRemoveMemberButton().props()).toMatchObject({
memberId: member.id,
memberType: null,
message: `Are you sure you want to revoke the invitation for ${member.invite.email} to join "${member.source.fullName}"`,
diff --git a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
index 4ff12f7fa97..d8453d453e7 100644
--- a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
@@ -1,6 +1,8 @@
+import { GlButton } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { modalData } from 'jest/members/mock_data';
import RemoveMemberButton from '~/members/components/action_buttons/remove_member_button.vue';
import { MEMBER_TYPES } from '~/members/constants';
@@ -10,6 +12,10 @@ localVue.use(Vuex);
describe('RemoveMemberButton', () => {
let wrapper;
+ const actions = {
+ showRemoveMemberModal: jest.fn(),
+ };
+
const createStore = (state = {}) => {
return new Vuex.Store({
modules: {
@@ -19,6 +25,7 @@ describe('RemoveMemberButton', () => {
memberPath: '/groups/foo-bar/-/group_members/:id',
...state,
},
+ actions,
},
},
});
@@ -47,20 +54,16 @@ describe('RemoveMemberButton', () => {
});
};
+ beforeEach(() => {
+ createComponent();
+ });
+
afterEach(() => {
wrapper.destroy();
});
it('sets attributes on button', () => {
- createComponent();
-
expect(wrapper.attributes()).toMatchObject({
- 'data-member-path': '/groups/foo-bar/-/group_members/1',
- 'data-member-type': 'GroupMember',
- 'data-message': 'Are you sure you want to remove John Smith?',
- 'data-is-access-request': 'true',
- 'data-is-invite': 'true',
- 'data-oncall-schedules': '{"name":"user","schedules":[]}',
'aria-label': 'Remove member',
title: 'Remove member',
icon: 'remove',
@@ -68,14 +71,12 @@ describe('RemoveMemberButton', () => {
});
it('displays `title` prop as a tooltip', () => {
- createComponent();
-
expect(getBinding(wrapper.element, 'gl-tooltip')).not.toBeUndefined();
});
- it('has CSS class used by `remove_member_modal.vue`', () => {
- createComponent();
+ it('calls Vuex action to show `remove member` modal when clicked', () => {
+ wrapper.findComponent(GlButton).vm.$emit('click');
- expect(wrapper.classes()).toContain('js-remove-member-button');
+ expect(actions.showRemoveMemberModal).toHaveBeenCalledWith(expect.any(Object), modalData);
});
});
diff --git a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
index a3b91cb20bb..3f47fa024bc 100644
--- a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
+++ b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
@@ -1,11 +1,23 @@
import { GlFilteredSearchToken } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
+import setWindowLocation from 'helpers/set_window_location_helper';
+import { redirectTo } from '~/lib/utils/url_utility';
import MembersFilteredSearchBar from '~/members/components/filter_sort/members_filtered_search_bar.vue';
import { MEMBER_TYPES } from '~/members/constants';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
+jest.mock('~/lib/utils/url_utility', () => {
+ const urlUtility = jest.requireActual('~/lib/utils/url_utility');
+
+ return {
+ __esModule: true,
+ ...urlUtility,
+ redirectTo: jest.fn(),
+ };
+});
+
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -113,12 +125,11 @@ describe('MembersFilteredSearchBar', () => {
describe('when filters are set via query params', () => {
beforeEach(() => {
- delete window.location;
- window.location = new URL('https://localhost');
+ setWindowLocation('https://localhost');
});
it('parses and passes tokens to `FilteredSearchBar` component as `initialFilterValue` prop', () => {
- window.location.search = '?two_factor=enabled&token_not_available=foobar';
+ setWindowLocation('?two_factor=enabled&token_not_available=foobar');
createComponent();
@@ -134,7 +145,7 @@ describe('MembersFilteredSearchBar', () => {
});
it('parses and passes search param to `FilteredSearchBar` component as `initialFilterValue` prop', () => {
- window.location.search = '?search=foobar';
+ setWindowLocation('?search=foobar');
createComponent();
@@ -149,7 +160,7 @@ describe('MembersFilteredSearchBar', () => {
});
it('parses and passes search param with multiple words to `FilteredSearchBar` component as `initialFilterValue` prop', () => {
- window.location.search = '?search=foo+bar+baz';
+ setWindowLocation('?search=foo+bar+baz');
createComponent();
@@ -166,8 +177,7 @@ describe('MembersFilteredSearchBar', () => {
describe('when filter bar is submitted', () => {
beforeEach(() => {
- delete window.location;
- window.location = new URL('https://localhost');
+ setWindowLocation('https://localhost');
});
it('adds correct filter query params', () => {
@@ -177,7 +187,7 @@ describe('MembersFilteredSearchBar', () => {
{ type: 'two_factor', value: { data: 'enabled', operator: '=' } },
]);
- expect(window.location.href).toBe('https://localhost/?two_factor=enabled');
+ expect(redirectTo).toHaveBeenCalledWith('https://localhost/?two_factor=enabled');
});
it('adds search query param', () => {
@@ -188,7 +198,9 @@ describe('MembersFilteredSearchBar', () => {
{ type: 'filtered-search-term', value: { data: 'foobar' } },
]);
- expect(window.location.href).toBe('https://localhost/?two_factor=enabled&search=foobar');
+ expect(redirectTo).toHaveBeenCalledWith(
+ 'https://localhost/?two_factor=enabled&search=foobar',
+ );
});
it('adds search query param with multiple words', () => {
@@ -199,11 +211,13 @@ describe('MembersFilteredSearchBar', () => {
{ type: 'filtered-search-term', value: { data: 'foo bar baz' } },
]);
- expect(window.location.href).toBe('https://localhost/?two_factor=enabled&search=foo+bar+baz');
+ expect(redirectTo).toHaveBeenCalledWith(
+ 'https://localhost/?two_factor=enabled&search=foo+bar+baz',
+ );
});
it('adds sort query param', () => {
- window.location.search = '?sort=name_asc';
+ setWindowLocation('?sort=name_asc');
createComponent();
@@ -212,13 +226,13 @@ describe('MembersFilteredSearchBar', () => {
{ type: 'filtered-search-term', value: { data: 'foobar' } },
]);
- expect(window.location.href).toBe(
+ expect(redirectTo).toHaveBeenCalledWith(
'https://localhost/?two_factor=enabled&search=foobar&sort=name_asc',
);
});
it('adds active tab query param', () => {
- window.location.search = '?tab=invited';
+ setWindowLocation('?tab=invited');
createComponent();
@@ -226,7 +240,7 @@ describe('MembersFilteredSearchBar', () => {
{ type: 'filtered-search-term', value: { data: 'foobar' } },
]);
- expect(window.location.href).toBe('https://localhost/?search=foobar&tab=invited');
+ expect(redirectTo).toHaveBeenCalledWith('https://localhost/?search=foobar&tab=invited');
});
});
});
diff --git a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
index 4b335755980..d0684acd487 100644
--- a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
+++ b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
@@ -1,6 +1,7 @@
import { GlSorting, GlSortingItem } from '@gitlab/ui';
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
+import setWindowLocation from 'helpers/set_window_location_helper';
import * as urlUtilities from '~/lib/utils/url_utility';
import SortDropdown from '~/members/components/filter_sort/sort_dropdown.vue';
import { MEMBER_TYPES } from '~/members/constants';
@@ -52,17 +53,16 @@ describe('SortDropdown', () => {
.findAll(GlSortingItem)
.wrappers.find((dropdownItemWrapper) => dropdownItemWrapper.text() === text);
- describe('dropdown options', () => {
- beforeEach(() => {
- delete window.location;
- window.location = new URL(URL_HOST);
- });
+ beforeEach(() => {
+ setWindowLocation(URL_HOST);
+ });
+ describe('dropdown options', () => {
it('adds dropdown items for all the sortable fields', () => {
const URL_FILTER_PARAMS = '?two_factor=enabled&search=foobar';
const EXPECTED_BASE_URL = `${URL_HOST}${URL_FILTER_PARAMS}&sort=`;
- window.location.search = URL_FILTER_PARAMS;
+ setWindowLocation(URL_FILTER_PARAMS);
const expectedDropdownItems = [
{
@@ -94,7 +94,7 @@ describe('SortDropdown', () => {
});
it('checks selected sort option', () => {
- window.location.search = '?sort=access_level_asc';
+ setWindowLocation('?sort=access_level_asc');
createComponent();
@@ -103,11 +103,6 @@ describe('SortDropdown', () => {
});
describe('dropdown toggle', () => {
- beforeEach(() => {
- delete window.location;
- window.location = new URL(URL_HOST);
- });
-
it('defaults to sorting by "Account" in ascending order', () => {
createComponent();
@@ -116,7 +111,7 @@ describe('SortDropdown', () => {
});
it('sets text as selected sort option', () => {
- window.location.search = '?sort=access_level_asc';
+ setWindowLocation('?sort=access_level_asc');
createComponent();
@@ -126,15 +121,12 @@ describe('SortDropdown', () => {
describe('sort direction toggle', () => {
beforeEach(() => {
- delete window.location;
- window.location = new URL(URL_HOST);
-
- jest.spyOn(urlUtilities, 'visitUrl');
+ jest.spyOn(urlUtilities, 'visitUrl').mockImplementation();
});
describe('when current sort direction is ascending', () => {
beforeEach(() => {
- window.location.search = '?sort=access_level_asc';
+ setWindowLocation('?sort=access_level_asc');
createComponent();
});
@@ -152,7 +144,7 @@ describe('SortDropdown', () => {
describe('when current sort direction is descending', () => {
beforeEach(() => {
- window.location.search = '?sort=access_level_desc';
+ setWindowLocation('?sort=access_level_desc');
createComponent();
});
diff --git a/spec/frontend/members/components/members_tabs_spec.js b/spec/frontend/members/components/members_tabs_spec.js
index 33d8eebf7eb..1d882e5ef09 100644
--- a/spec/frontend/members/components/members_tabs_spec.js
+++ b/spec/frontend/members/components/members_tabs_spec.js
@@ -1,6 +1,7 @@
-import { GlTabs } from '@gitlab/ui';
+import { GlTabs, GlButton } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import MembersApp from '~/members/components/app.vue';
import MembersTabs from '~/members/components/members_tabs.vue';
@@ -16,7 +17,7 @@ describe('MembersTabs', () => {
let wrapper;
- const createComponent = ({ totalItems = 10, options = {} } = {}) => {
+ const createComponent = ({ totalItems = 10, provide = {} } = {}) => {
const store = new Vuex.Store({
modules: {
[MEMBER_TYPES.user]: {
@@ -78,8 +79,10 @@ describe('MembersTabs', () => {
stubs: ['members-app'],
provide: {
canManageMembers: true,
+ canExportMembers: true,
+ exportCsvPath: '',
+ ...provide,
},
- ...options,
});
return nextTick();
@@ -88,10 +91,10 @@ describe('MembersTabs', () => {
const findTabs = () => wrapper.findAllByRole('tab').wrappers;
const findTabByText = (text) => findTabs().find((tab) => tab.text().includes(text));
const findActiveTab = () => wrapper.findByRole('tab', { selected: true });
+ const findExportButton = () => wrapper.findComponent(GlButton);
beforeEach(() => {
- delete window.location;
- window.location = new URL('https://localhost');
+ setWindowLocation('https://localhost');
});
afterEach(() => {
@@ -151,7 +154,7 @@ describe('MembersTabs', () => {
describe('when url param matches `filteredSearchBar.searchParam`', () => {
beforeEach(() => {
- window.location.search = '?search_groups=foo+bar';
+ setWindowLocation('?search_groups=foo+bar');
});
it('shows tab that corresponds to search param', async () => {
@@ -164,7 +167,7 @@ describe('MembersTabs', () => {
describe('when `canManageMembers` is `false`', () => {
it('shows all tabs except `Invited` and `Access requests`', async () => {
- await createComponent({ options: { provide: { canManageMembers: false } } });
+ await createComponent({ provide: { canManageMembers: false } });
expect(findTabByText('Members')).not.toBeUndefined();
expect(findTabByText('Groups')).not.toBeUndefined();
@@ -172,4 +175,20 @@ describe('MembersTabs', () => {
expect(findTabByText('Access requests')).toBeUndefined();
});
});
+
+ describe('when `canExportMembers` is true', () => {
+ it('shows the CSV export button with export path', async () => {
+ await createComponent({ provide: { canExportMembers: true, exportCsvPath: 'foo' } });
+
+ expect(findExportButton().attributes('href')).toBe('foo');
+ });
+ });
+
+ describe('when `canExportMembers` is false', () => {
+ it('does not show the CSV export button', async () => {
+ await createComponent({ provide: { canExportMembers: false } });
+
+ expect(findExportButton().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/remove_member_modal_spec.js b/spec/frontend/members/components/modals/remove_member_modal_spec.js
index ce9de28d53c..1dc41582c12 100644
--- a/spec/frontend/vue_shared/components/remove_member_modal_spec.js
+++ b/spec/frontend/members/components/modals/remove_member_modal_spec.js
@@ -1,37 +1,61 @@
import { GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import RemoveMemberModal from '~/members/components/modals/remove_member_modal.vue';
+import { MEMBER_TYPES } from '~/members/constants';
import OncallSchedulesList from '~/vue_shared/components/oncall_schedules_list.vue';
-import RemoveMemberModal from '~/vue_shared/components/remove_member_modal.vue';
-const mockSchedules = JSON.stringify({
- schedules: [
- {
- id: 1,
- name: 'Schedule 1',
- },
- ],
- name: 'User1',
-});
+Vue.use(Vuex);
describe('RemoveMemberModal', () => {
const memberPath = '/gitlab-org/gitlab-test/-/project_members/90';
+ const mockSchedules = {
+ name: 'User1',
+ schedules: [{ id: 1, name: 'Schedule 1' }],
+ };
let wrapper;
+ const actions = {
+ hideRemoveMemberModal: jest.fn(),
+ };
+
+ const createStore = (removeMemberModalData) =>
+ new Vuex.Store({
+ modules: {
+ [MEMBER_TYPES.user]: {
+ namespaced: true,
+ state: {
+ removeMemberModalData,
+ },
+ actions,
+ },
+ },
+ });
+
+ const createComponent = (state) => {
+ wrapper = shallowMount(RemoveMemberModal, {
+ store: createStore(state),
+ provide: {
+ namespace: MEMBER_TYPES.user,
+ },
+ });
+ };
+
const findForm = () => wrapper.find({ ref: 'form' });
const findGlModal = () => wrapper.findComponent(GlModal);
const findOnCallSchedulesList = () => wrapper.findComponent(OncallSchedulesList);
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
describe.each`
- state | memberType | isAccessRequest | isInvite | actionText | removeSubMembershipsCheckboxExpected | unassignIssuablesCheckboxExpected | message | onCallSchedules
- ${'removing a group member'} | ${'GroupMember'} | ${false} | ${'false'} | ${'Remove member'} | ${true} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${`{}`}
- ${'removing a project member'} | ${'ProjectMember'} | ${false} | ${'false'} | ${'Remove member'} | ${false} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${mockSchedules}
- ${'denying an access request'} | ${'ProjectMember'} | ${true} | ${'false'} | ${'Deny access request'} | ${false} | ${false} | ${"Are you sure you want to deny Jane Doe's request to join the Gitlab Org / Gitlab Test project?"} | ${`{}`}
- ${'revoking invite'} | ${'ProjectMember'} | ${false} | ${'true'} | ${'Revoke invite'} | ${false} | ${false} | ${'Are you sure you want to revoke the invitation for foo@bar.com to join the Gitlab Org / Gitlab Test project?'} | ${mockSchedules}
+ state | memberType | isAccessRequest | isInvite | actionText | removeSubMembershipsCheckboxExpected | unassignIssuablesCheckboxExpected | message | onCallSchedules
+ ${'removing a group member'} | ${'GroupMember'} | ${false} | ${false} | ${'Remove member'} | ${true} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${{}}
+ ${'removing a project member'} | ${'ProjectMember'} | ${false} | ${false} | ${'Remove member'} | ${false} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${mockSchedules}
+ ${'denying an access request'} | ${'ProjectMember'} | ${true} | ${false} | ${'Deny access request'} | ${false} | ${false} | ${"Are you sure you want to deny Jane Doe's request to join the Gitlab Org / Gitlab Test project?"} | ${{}}
+ ${'revoking invite'} | ${'ProjectMember'} | ${false} | ${true} | ${'Revoke invite'} | ${false} | ${false} | ${'Are you sure you want to revoke the invitation for foo@bar.com to join the Gitlab Org / Gitlab Test project?'} | ${mockSchedules}
`(
'when $state',
({
@@ -45,24 +69,17 @@ describe('RemoveMemberModal', () => {
onCallSchedules,
}) => {
beforeEach(() => {
- wrapper = shallowMount(RemoveMemberModal, {
- data() {
- return {
- modalData: {
- isAccessRequest,
- isInvite,
- message,
- memberPath,
- memberType,
- onCallSchedules,
- },
- };
- },
+ createComponent({
+ isAccessRequest,
+ isInvite,
+ message,
+ memberPath,
+ memberType,
+ onCallSchedules,
});
});
- const parsedSchedules = JSON.parse(onCallSchedules);
- const isPartOfOncallSchedules = Boolean(isAccessRequest && parsedSchedules.schedules?.length);
+ const isPartOfOncallSchedules = Boolean(isAccessRequest && onCallSchedules.schedules?.length);
it(`has the title ${actionText}`, () => {
expect(findGlModal().attributes('title')).toBe(actionText);
@@ -73,7 +90,7 @@ describe('RemoveMemberModal', () => {
});
it('displays a message to the user', () => {
- expect(wrapper.find('[data-testid=modal-message]').text()).toBe(message);
+ expect(wrapper.find('p').text()).toBe(message);
});
it(`shows ${
@@ -105,6 +122,12 @@ describe('RemoveMemberModal', () => {
spy.mockRestore();
});
+
+ it('calls Vuex action to hide the modal when `GlModal` emits `hide` event', () => {
+ findGlModal().vm.$emit('hide');
+
+ expect(actions.hideRemoveMemberModal).toHaveBeenCalled();
+ });
},
);
});
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index 3a17d78bd17..6885da53b26 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -6,6 +6,7 @@ import {
} from '@testing-library/dom';
import { mount, createLocalVue, createWrapper } from '@vue/test-utils';
import Vuex from 'vuex';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import CreatedAt from '~/members/components/table/created_at.vue';
import ExpirationDatepicker from '~/members/components/table/expiration_datepicker.vue';
@@ -72,6 +73,7 @@ describe('MembersTable', () => {
'member-action-buttons',
'role-dropdown',
'remove-group-link-modal',
+ 'remove-member-modal',
'expiration-datepicker',
],
});
@@ -242,12 +244,8 @@ describe('MembersTable', () => {
});
describe('when required pagination data is provided', () => {
- beforeEach(() => {
- delete window.location;
- });
-
it('renders `gl-pagination` component with correct props', () => {
- window.location = new URL(url);
+ setWindowLocation(url);
createComponent();
@@ -267,7 +265,7 @@ describe('MembersTable', () => {
});
it('uses `pagination.paramName` to generate the pagination links', () => {
- window.location = new URL(url);
+ setWindowLocation(url);
createComponent({
pagination: {
@@ -282,7 +280,7 @@ describe('MembersTable', () => {
});
it('removes any url params defined as `null` in the `params` attribute', () => {
- window.location = new URL(`${url}&search_groups=foo`);
+ setWindowLocation(`${url}&search_groups=foo`);
createComponent({
pagination: {
diff --git a/spec/frontend/members/mock_data.js b/spec/frontend/members/mock_data.js
index 4275db5fa9f..eb9f905fea2 100644
--- a/spec/frontend/members/mock_data.js
+++ b/spec/frontend/members/mock_data.js
@@ -57,6 +57,15 @@ export const group = {
validRoles: { Guest: 10, Reporter: 20, Developer: 30, Maintainer: 40, Owner: 50 },
};
+export const modalData = {
+ isAccessRequest: true,
+ isInvite: true,
+ memberPath: '/groups/foo-bar/-/group_members/1',
+ memberType: 'GroupMember',
+ message: 'Are you sure you want to remove John Smith?',
+ oncallSchedules: { name: 'user', schedules: [] },
+};
+
const { user, ...memberNoUser } = member;
export const invite = {
...memberNoUser,
diff --git a/spec/frontend/members/store/actions_spec.js b/spec/frontend/members/store/actions_spec.js
index d913c5c56df..d37e6871387 100644
--- a/spec/frontend/members/store/actions_spec.js
+++ b/spec/frontend/members/store/actions_spec.js
@@ -3,12 +3,14 @@ import MockAdapter from 'axios-mock-adapter';
import { noop } from 'lodash';
import { useFakeDate } from 'helpers/fake_date';
import testAction from 'helpers/vuex_action_helper';
-import { members, group } from 'jest/members/mock_data';
+import { members, group, modalData } from 'jest/members/mock_data';
import httpStatusCodes from '~/lib/utils/http_status';
import {
updateMemberRole,
showRemoveGroupLinkModal,
hideRemoveGroupLinkModal,
+ showRemoveMemberModal,
+ hideRemoveMemberModal,
updateMemberExpiration,
} from '~/members/store/actions';
import * as types from '~/members/store/mutation_types';
@@ -153,4 +155,32 @@ describe('Vuex members actions', () => {
});
});
});
+
+ describe('Remove member modal', () => {
+ const state = {
+ removeMemberModalVisible: false,
+ removeMemberModalData: {},
+ };
+
+ describe('showRemoveMemberModal', () => {
+ it(`commits ${types.SHOW_REMOVE_MEMBER_MODAL} mutation`, () => {
+ testAction(showRemoveMemberModal, modalData, state, [
+ {
+ type: types.SHOW_REMOVE_MEMBER_MODAL,
+ payload: modalData,
+ },
+ ]);
+ });
+ });
+
+ describe('hideRemoveMemberModal', () => {
+ it(`commits ${types.HIDE_REMOVE_MEMBER_MODAL} mutation`, () => {
+ testAction(hideRemoveMemberModal, undefined, state, [
+ {
+ type: types.HIDE_REMOVE_MEMBER_MODAL,
+ },
+ ]);
+ });
+ });
+ });
});
diff --git a/spec/frontend/members/store/mutations_spec.js b/spec/frontend/members/store/mutations_spec.js
index 78bbad394a0..8160cc373d8 100644
--- a/spec/frontend/members/store/mutations_spec.js
+++ b/spec/frontend/members/store/mutations_spec.js
@@ -1,4 +1,4 @@
-import { members, group } from 'jest/members/mock_data';
+import { members, group, modalData } from 'jest/members/mock_data';
import * as types from '~/members/store/mutation_types';
import mutations from '~/members/store/mutations';
@@ -152,4 +152,32 @@ describe('Vuex members mutations', () => {
expect(state.removeGroupLinkModalVisible).toBe(false);
});
});
+
+ describe(types.SHOW_REMOVE_MEMBER_MODAL, () => {
+ it('sets `removeMemberModalVisible` and `removeMemberModalData`', () => {
+ const state = {
+ removeMemberModalVisible: false,
+ removeMemberModalData: {},
+ };
+
+ mutations[types.SHOW_REMOVE_MEMBER_MODAL](state, modalData);
+
+ expect(state).toEqual({
+ removeMemberModalVisible: true,
+ removeMemberModalData: modalData,
+ });
+ });
+ });
+
+ describe(types.HIDE_REMOVE_MEMBER_MODAL, () => {
+ it('sets `removeMemberModalVisible` to `false`', () => {
+ const state = {
+ removeMemberModalVisible: true,
+ };
+
+ mutations[types.HIDE_REMOVE_MEMBER_MODAL](state);
+
+ expect(state.removeMemberModalVisible).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/members/utils_spec.js b/spec/frontend/members/utils_spec.js
index 9740e1c2edb..a157cfa1c1d 100644
--- a/spec/frontend/members/utils_spec.js
+++ b/spec/frontend/members/utils_spec.js
@@ -1,3 +1,4 @@
+import setWindowLocation from 'helpers/set_window_location_helper';
import { DEFAULT_SORT, MEMBER_TYPES } from '~/members/constants';
import {
generateBadges,
@@ -150,21 +151,18 @@ describe('Members Utils', () => {
describe('parseSortParam', () => {
beforeEach(() => {
- delete window.location;
- window.location = new URL(URL_HOST);
+ setWindowLocation(URL_HOST);
});
describe('when `sort` param is not present', () => {
it('returns default sort options', () => {
- window.location.search = '';
-
expect(parseSortParam(['account'])).toEqual(DEFAULT_SORT);
});
});
describe('when field passed in `sortableFields` argument does not have `sort` key defined', () => {
it('returns default sort options', () => {
- window.location.search = '?sort=source_asc';
+ setWindowLocation('?sort=source_asc');
expect(parseSortParam(['source'])).toEqual(DEFAULT_SORT);
});
@@ -182,7 +180,7 @@ describe('Members Utils', () => {
${'oldest_sign_in'} | ${{ sortByKey: 'lastSignIn', sortDesc: true }}
`('when `sort` query string param is `$sortParam`', ({ sortParam, expected }) => {
it(`returns ${JSON.stringify(expected)}`, async () => {
- window.location.search = `?sort=${sortParam}`;
+ setWindowLocation(`?sort=${sortParam}`);
expect(parseSortParam(['account', 'granted', 'expires', 'maxRole', 'lastSignIn'])).toEqual(
expected,
@@ -193,8 +191,7 @@ describe('Members Utils', () => {
describe('buildSortHref', () => {
beforeEach(() => {
- delete window.location;
- window.location = new URL(URL_HOST);
+ setWindowLocation(URL_HOST);
});
describe('when field passed in `sortBy` argument does not have `sort` key defined', () => {
@@ -225,7 +222,7 @@ describe('Members Utils', () => {
describe('when filter params are set', () => {
it('merges the `sort` param with the filter params', () => {
- window.location.search = '?two_factor=enabled&with_inherited_permissions=exclude';
+ setWindowLocation('?two_factor=enabled&with_inherited_permissions=exclude');
expect(
buildSortHref({
@@ -240,7 +237,7 @@ describe('Members Utils', () => {
describe('when search param is set', () => {
it('merges the `sort` param with the search param', () => {
- window.location.search = '?search=foobar';
+ setWindowLocation('?search=foobar');
expect(
buildSortHref({
diff --git a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
index dbb9fd5f603..f2116c1f478 100644
--- a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
@@ -301,9 +301,6 @@ describe('Actions menu', () => {
});
it('redirects to the newly created dashboard', () => {
- delete window.location;
- window.location = new URL('https://localhost');
-
const newDashboard = dashboardGitResponse[1];
const newDashboardUrl = 'root/sandbox/-/metrics/dashboard.yml';
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 7ca1b97d849..f899580b3df 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -1,5 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import VueDraggable from 'vuedraggable';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createFlash from '~/flash';
@@ -226,32 +227,25 @@ describe('Dashboard', () => {
});
describe('when the URL contains a reference to a panel', () => {
- let location;
+ const location = window.location.href;
- const setSearch = (search) => {
- window.location = { ...location, search };
+ const setSearch = (searchParams) => {
+ setWindowLocation(`?${objectToQuery(searchParams)}`);
};
- beforeEach(() => {
- location = window.location;
- delete window.location;
- });
-
afterEach(() => {
- window.location = location;
+ setWindowLocation(location);
});
it('when the URL points to a panel it expands', () => {
const panelGroup = metricsDashboardViewModel.panelGroups[0];
const panel = panelGroup.panels[0];
- setSearch(
- objectToQuery({
- group: panelGroup.group,
- title: panel.title,
- y_label: panel.y_label,
- }),
- );
+ setSearch({
+ group: panelGroup.group,
+ title: panel.title,
+ y_label: panel.y_label,
+ });
createMountedWrapper({ hasMetrics: true });
setupStoreWithData(store);
@@ -268,7 +262,7 @@ describe('Dashboard', () => {
});
it('when the URL does not link to any panel, no panel is expanded', () => {
- setSearch('');
+ setSearch();
createMountedWrapper({ hasMetrics: true });
setupStoreWithData(store);
@@ -285,13 +279,11 @@ describe('Dashboard', () => {
const panelGroup = metricsDashboardViewModel.panelGroups[0];
const panel = panelGroup.panels[0];
- setSearch(
- objectToQuery({
- group: panelGroup.group,
- title: 'incorrect',
- y_label: panel.y_label,
- }),
- );
+ setSearch({
+ group: panelGroup.group,
+ title: 'incorrect',
+ y_label: panel.y_label,
+ });
createMountedWrapper({ hasMetrics: true });
setupStoreWithData(store);
diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js
index 25ae4dcd702..31975052077 100644
--- a/spec/frontend/monitoring/utils_spec.js
+++ b/spec/frontend/monitoring/utils_spec.js
@@ -448,7 +448,7 @@ describe('monitoring/utils', () => {
input | urlParams
${[]} | ${''}
${[{ name: 'env', value: 'prod' }]} | ${'?var-env=prod'}
- ${[{ name: 'env1', value: 'prod' }, { name: 'env2', value: null }]} | ${'?var-env=prod&var-env1=prod'}
+ ${[{ name: 'env1', value: 'prod' }, { name: 'env2', value: null }]} | ${'?var-env1=prod'}
`(
'setCustomVariablesFromUrl updates history with query "$urlParams" with input $input',
({ input, urlParams }) => {
diff --git a/spec/frontend/nav/components/responsive_app_spec.js b/spec/frontend/nav/components/responsive_app_spec.js
index e1b443745e3..4af8c6020bc 100644
--- a/spec/frontend/nav/components/responsive_app_spec.js
+++ b/spec/frontend/nav/components/responsive_app_spec.js
@@ -3,16 +3,10 @@ import ResponsiveApp from '~/nav/components/responsive_app.vue';
import ResponsiveHeader from '~/nav/components/responsive_header.vue';
import ResponsiveHome from '~/nav/components/responsive_home.vue';
import TopNavContainerView from '~/nav/components/top_nav_container_view.vue';
-import eventHub, { EVENT_RESPONSIVE_TOGGLE } from '~/nav/event_hub';
import { resetMenuItemsActive } from '~/nav/utils/reset_menu_items_active';
import KeepAliveSlots from '~/vue_shared/components/keep_alive_slots.vue';
import { TEST_NAV_DATA } from '../mock_data';
-const HTML_HEADER_CONTENT = '<div class="header-content"></div>';
-const HTML_MENU_EXPANDED = '<div class="menu-expanded"></div>';
-const HTML_HEADER_WITH_MENU_EXPANDED =
- '<div></div><div class="header-content menu-expanded"></div>';
-
describe('~/nav/components/responsive_app.vue', () => {
let wrapper;
@@ -26,13 +20,10 @@ describe('~/nav/components/responsive_app.vue', () => {
},
});
};
- const triggerResponsiveToggle = () => eventHub.$emit(EVENT_RESPONSIVE_TOGGLE);
-
const findHome = () => wrapper.findComponent(ResponsiveHome);
const findMobileOverlay = () => wrapper.find('[data-testid="mobile-overlay"]');
const findSubviewHeader = () => wrapper.findComponent(ResponsiveHeader);
const findSubviewContainer = () => wrapper.findComponent(TopNavContainerView);
- const hasBodyResponsiveOpen = () => document.body.classList.contains('top-nav-responsive-open');
const hasMobileOverlayVisible = () => findMobileOverlay().classes('mobile-nav-open');
beforeEach(() => {
@@ -58,23 +49,6 @@ describe('~/nav/components/responsive_app.vue', () => {
});
it.each`
- bodyHtml | expectation
- ${''} | ${false}
- ${HTML_HEADER_CONTENT} | ${false}
- ${HTML_MENU_EXPANDED} | ${false}
- ${HTML_HEADER_WITH_MENU_EXPANDED} | ${true}
- `(
- 'with responsive toggle event and html set to $bodyHtml, responsive open = $expectation',
- ({ bodyHtml, expectation }) => {
- document.body.innerHTML = bodyHtml;
-
- triggerResponsiveToggle();
-
- expect(hasBodyResponsiveOpen()).toBe(expectation);
- },
- );
-
- it.each`
events | expectation
${[]} | ${false}
${['bv::dropdown::show']} | ${true}
@@ -96,17 +70,6 @@ describe('~/nav/components/responsive_app.vue', () => {
);
});
- describe('with menu expanded in body', () => {
- beforeEach(() => {
- document.body.innerHTML = HTML_HEADER_WITH_MENU_EXPANDED;
- createComponent();
- });
-
- it('sets the body responsive open', () => {
- expect(hasBodyResponsiveOpen()).toBe(true);
- });
- });
-
const projectsContainerProps = {
containerClass: 'gl-px-3',
frequentItemsDropdownType: ResponsiveApp.FREQUENT_ITEMS_PROJECTS.namespace,
@@ -159,17 +122,4 @@ describe('~/nav/components/responsive_app.vue', () => {
});
});
});
-
- describe('when destroyed', () => {
- beforeEach(() => {
- createComponent();
- wrapper.destroy();
- });
-
- it('responsive toggle event does nothing', () => {
- triggerResponsiveToggle();
-
- expect(hasBodyResponsiveOpen()).toBe(false);
- });
- });
});
diff --git a/spec/frontend/notes/components/comment_field_layout_spec.js b/spec/frontend/notes/components/comment_field_layout_spec.js
index 4d9b4ea8c6f..90c989540b9 100644
--- a/spec/frontend/notes/components/comment_field_layout_spec.js
+++ b/spec/frontend/notes/components/comment_field_layout_spec.js
@@ -134,4 +134,18 @@ describe('Comment Field Layout Component', () => {
]);
});
});
+
+ describe('issue has email participants, but note is confidential', () => {
+ it('does not show EmailParticipantsWarning', () => {
+ createWrapper({
+ noteableData: {
+ ...noteableDataMock,
+ issue_email_participants: [{ email: 'someone@gitlab.com' }],
+ },
+ noteIsConfidential: true,
+ });
+
+ expect(findEmailParticipantsWarning().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index f217dfd2e48..467a8bec21b 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -258,7 +258,11 @@ describe('issue_note', () => {
},
});
- noteBodyComponent.vm.$emit('handleFormUpdate', noteBody, null, () => {});
+ noteBodyComponent.vm.$emit('handleFormUpdate', {
+ noteText: noteBody,
+ parentElement: null,
+ callback: () => {},
+ });
await waitForPromises();
expect(alertSpy).not.toHaveBeenCalled();
@@ -287,14 +291,18 @@ describe('issue_note', () => {
const noteBody = wrapper.findComponent(NoteBody);
noteBody.vm.resetAutoSave = () => {};
- noteBody.vm.$emit('handleFormUpdate', updatedText, null, () => {});
+ noteBody.vm.$emit('handleFormUpdate', {
+ noteText: updatedText,
+ parentElement: null,
+ callback: () => {},
+ });
await wrapper.vm.$nextTick();
let noteBodyProps = noteBody.props();
expect(noteBodyProps.note.note_html).toBe(`<p>${updatedText}</p>\n`);
- noteBody.vm.$emit('cancelForm');
+ noteBody.vm.$emit('cancelForm', {});
await wrapper.vm.$nextTick();
noteBodyProps = noteBody.props();
@@ -305,7 +313,12 @@ describe('issue_note', () => {
describe('formUpdateHandler', () => {
const updateNote = jest.fn();
- const params = ['', null, jest.fn(), ''];
+ const params = {
+ noteText: '',
+ parentElement: null,
+ callback: jest.fn(),
+ resolveDiscussion: false,
+ };
const updateActions = () => {
store.hotUpdate({
@@ -325,14 +338,14 @@ describe('issue_note', () => {
it('responds to handleFormUpdate', () => {
createWrapper();
updateActions();
- wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', ...params);
+ wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', params);
expect(wrapper.emitted('handleUpdateNote')).toBeTruthy();
});
it('does not stringify empty position', () => {
createWrapper();
updateActions();
- wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', ...params);
+ wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', params);
expect(updateNote.mock.calls[0][1].note.note.position).toBeUndefined();
});
@@ -341,7 +354,7 @@ describe('issue_note', () => {
const expectation = JSON.stringify(position);
createWrapper({ note: { ...note, position } });
updateActions();
- wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', ...params);
+ wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', params);
expect(updateNote.mock.calls[0][1].note.note.position).toBe(expectation);
});
});
diff --git a/spec/frontend/packages/details/components/app_spec.js b/spec/frontend/packages/details/components/app_spec.js
index 3132ec61942..377e7e05f09 100644
--- a/spec/frontend/packages/details/components/app_spec.js
+++ b/spec/frontend/packages/details/components/app_spec.js
@@ -2,6 +2,7 @@ import { GlEmptyState } from '@gitlab/ui';
import { mount, createLocalVue } from '@vue/test-utils';
import { nextTick } from 'vue';
import Vuex from 'vuex';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import stubChildren from 'helpers/stub_children';
import AdditionalMetadata from '~/packages/details/components/additional_metadata.vue';
@@ -30,6 +31,8 @@ import {
const localVue = createLocalVue();
localVue.use(Vuex);
+useMockLocationHelper();
+
describe('PackagesApp', () => {
let wrapper;
let store;
@@ -37,7 +40,6 @@ describe('PackagesApp', () => {
const deletePackage = jest.fn();
const deletePackageFile = jest.fn();
const defaultProjectName = 'bar';
- const { location } = window;
function createComponent({
packageEntity = mavenPackage,
@@ -100,14 +102,8 @@ describe('PackagesApp', () => {
const findInstallationCommands = () => wrapper.find(InstallationCommands);
const findPackageFiles = () => wrapper.find(PackageFiles);
- beforeEach(() => {
- delete window.location;
- window.location = { replace: jest.fn() };
- });
-
afterEach(() => {
wrapper.destroy();
- window.location = location;
});
it('renders the app and displays the package title', async () => {
diff --git a/spec/frontend/packages/list/components/packages_list_app_spec.js b/spec/frontend/packages/list/components/packages_list_app_spec.js
index 4de2dd0789e..b94192c531c 100644
--- a/spec/frontend/packages/list/components/packages_list_app_spec.js
+++ b/spec/frontend/packages/list/components/packages_list_app_spec.js
@@ -1,6 +1,7 @@
import { GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
+import setWindowLocation from 'helpers/set_window_location_helper';
import createFlash from '~/flash';
import * as commonUtils from '~/lib/utils/common_utils';
import PackageListApp from '~/packages/list/components/packages_list_app.vue';
@@ -233,21 +234,17 @@ describe('packages_list_app', () => {
});
describe('delete alert handling', () => {
- const { location } = window.location;
+ const originalLocation = window.location.href;
const search = `?${SHOW_DELETE_SUCCESS_ALERT}=true`;
beforeEach(() => {
createStore();
jest.spyOn(commonUtils, 'historyReplaceState').mockImplementation(() => {});
- delete window.location;
- window.location = {
- href: `foo_bar_baz${search}`,
- search,
- };
+ setWindowLocation(search);
});
afterEach(() => {
- window.location = location;
+ setWindowLocation(originalLocation);
});
it(`creates a flash if the query string contains ${SHOW_DELETE_SUCCESS_ALERT}`, () => {
@@ -262,11 +259,11 @@ describe('packages_list_app', () => {
it('calls historyReplaceState with a clean url', () => {
mountComponent();
- expect(commonUtils.historyReplaceState).toHaveBeenCalledWith('foo_bar_baz');
+ expect(commonUtils.historyReplaceState).toHaveBeenCalledWith(originalLocation);
});
it(`does nothing if the query string does not contain ${SHOW_DELETE_SUCCESS_ALERT}`, () => {
- window.location.search = '';
+ setWindowLocation('?');
mountComponent();
expect(createFlash).not.toHaveBeenCalled();
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/conan_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/conan_installation_spec.js.snap
new file mode 100644
index 00000000000..e9f80d5f512
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/conan_installation_spec.js.snap
@@ -0,0 +1,36 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`ConanInstallation renders all the messages 1`] = `
+<div>
+ <installation-title-stub
+ options="[object Object]"
+ packagetype="conan"
+ />
+
+ <code-instruction-stub
+ copytext="Copy Conan Command"
+ instruction="conan install @gitlab-org/package-15 --remote=gitlab"
+ label="Conan Command"
+ trackingaction="copy_conan_command"
+ trackinglabel="code_instruction"
+ />
+
+ <h3
+ class="gl-font-lg"
+ >
+ Registry setup
+ </h3>
+
+ <code-instruction-stub
+ copytext="Copy Conan Setup Command"
+ instruction="conan remote add gitlab conanPath"
+ label="Add Conan Remote"
+ trackingaction="copy_conan_setup_command"
+ trackinglabel="code_instruction"
+ />
+
+ <gl-sprintf-stub
+ message="For more information on the Conan registry, %{linkStart}see the documentation%{linkEnd}."
+ />
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/dependency_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/dependency_row_spec.js.snap
new file mode 100644
index 00000000000..f83df7b11f4
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/dependency_row_spec.js.snap
@@ -0,0 +1,36 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`DependencyRow renders full dependency 1`] = `
+<div
+ class="gl-responsive-table-row"
+>
+ <div
+ class="table-section section-50"
+ >
+ <strong
+ class="gl-text-body"
+ >
+ Ninject.Extensions.Factory
+ </strong>
+
+ <span
+ data-testid="target-framework"
+ >
+
+ (.NETCoreApp3.1)
+
+ </span>
+ </div>
+
+ <div
+ class="table-section section-50 gl-display-flex gl-md-justify-content-end"
+ data-testid="version-pattern"
+ >
+ <span
+ class="gl-text-body"
+ >
+ 3.3.2
+ </span>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap
new file mode 100644
index 00000000000..881d441e116
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap
@@ -0,0 +1,30 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`FileSha renders 1`] = `
+<div
+ class="gl-display-flex gl-align-items-center gl-font-monospace gl-font-sm gl-word-break-all gl-py-2 gl-border-b-solid gl-border-gray-100 gl-border-b-1"
+>
+ <!---->
+
+ <span>
+ <div
+ class="gl-px-4"
+ >
+
+ bar:
+ foo
+
+ <gl-button-stub
+ aria-label="Copy this value"
+ buttontextclasses=""
+ category="tertiary"
+ data-clipboard-text="foo"
+ icon="copy-to-clipboard"
+ size="small"
+ title="Copy SHA"
+ variant="default"
+ />
+ </div>
+ </span>
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/maven_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/maven_installation_spec.js.snap
new file mode 100644
index 00000000000..4865b8205ab
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/maven_installation_spec.js.snap
@@ -0,0 +1,135 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`MavenInstallation groovy renders all the messages 1`] = `
+<div>
+ <installation-title-stub
+ options="[object Object],[object Object],[object Object]"
+ packagetype="maven"
+ />
+
+ <code-instruction-stub
+ class="gl-mb-5"
+ copytext="Copy Gradle Groovy DSL install command"
+ instruction="implementation 'appGroup:appName:appVersion'"
+ label="Gradle Groovy DSL install command"
+ trackingaction="copy_gradle_install_command"
+ trackinglabel="code_instruction"
+ />
+
+ <code-instruction-stub
+ copytext="Copy add Gradle Groovy DSL repository command"
+ instruction="maven {
+ url 'mavenPath'
+}"
+ label="Add Gradle Groovy DSL repository command"
+ multiline="true"
+ trackingaction="copy_gradle_add_to_source_command"
+ trackinglabel="code_instruction"
+ />
+</div>
+`;
+
+exports[`MavenInstallation kotlin renders all the messages 1`] = `
+<div>
+ <installation-title-stub
+ options="[object Object],[object Object],[object Object]"
+ packagetype="maven"
+ />
+
+ <code-instruction-stub
+ class="gl-mb-5"
+ copytext="Copy Gradle Kotlin DSL install command"
+ instruction="implementation(\\"appGroup:appName:appVersion\\")"
+ label="Gradle Kotlin DSL install command"
+ trackingaction="copy_kotlin_install_command"
+ trackinglabel="code_instruction"
+ />
+
+ <code-instruction-stub
+ copytext="Copy add Gradle Kotlin DSL repository command"
+ instruction="maven(\\"mavenPath\\")"
+ label="Add Gradle Kotlin DSL repository command"
+ multiline="true"
+ trackingaction="copy_kotlin_add_to_source_command"
+ trackinglabel="code_instruction"
+ />
+</div>
+`;
+
+exports[`MavenInstallation maven renders all the messages 1`] = `
+<div>
+ <installation-title-stub
+ options="[object Object],[object Object],[object Object]"
+ packagetype="maven"
+ />
+
+ <p>
+ <gl-sprintf-stub
+ message="Copy and paste this inside your %{codeStart}pom.xml%{codeEnd} %{codeStart}dependencies%{codeEnd} block."
+ />
+ </p>
+
+ <code-instruction-stub
+ copytext="Copy Maven XML"
+ instruction="<dependency>
+ <groupId>appGroup</groupId>
+ <artifactId>appName</artifactId>
+ <version>appVersion</version>
+</dependency>"
+ label=""
+ multiline="true"
+ trackingaction="copy_maven_xml"
+ trackinglabel="code_instruction"
+ />
+
+ <code-instruction-stub
+ copytext="Copy Maven command"
+ instruction="mvn dependency:get -Dartifact=appGroup:appName:appVersion"
+ label="Maven Command"
+ trackingaction="copy_maven_command"
+ trackinglabel="code_instruction"
+ />
+
+ <h3
+ class="gl-font-lg"
+ >
+ Registry setup
+ </h3>
+
+ <p>
+ <gl-sprintf-stub
+ message="If you haven't already done so, you will need to add the below to your %{codeStart}pom.xml%{codeEnd} file."
+ />
+ </p>
+
+ <code-instruction-stub
+ copytext="Copy Maven registry XML"
+ instruction="<repositories>
+ <repository>
+ <id>gitlab-maven</id>
+ <url>mavenPath</url>
+ </repository>
+</repositories>
+
+<distributionManagement>
+ <repository>
+ <id>gitlab-maven</id>
+ <url>mavenPath</url>
+ </repository>
+
+ <snapshotRepository>
+ <id>gitlab-maven</id>
+ <url>mavenPath</url>
+ </snapshotRepository>
+</distributionManagement>"
+ label=""
+ multiline="true"
+ trackingaction="copy_maven_setup_xml"
+ trackinglabel="code_instruction"
+ />
+
+ <gl-sprintf-stub
+ message="For more information on the Maven registry, %{linkStart}see the documentation%{linkEnd}."
+ />
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap
new file mode 100644
index 00000000000..6a7f14dc33f
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap
@@ -0,0 +1,36 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`NpmInstallation renders all the messages 1`] = `
+<div>
+ <installation-title-stub
+ options="[object Object],[object Object]"
+ packagetype="npm"
+ />
+
+ <code-instruction-stub
+ copytext="Copy npm command"
+ instruction="npm i @gitlab-org/package-15"
+ label=""
+ trackingaction="copy_npm_install_command"
+ trackinglabel="code_instruction"
+ />
+
+ <h3
+ class="gl-font-lg"
+ >
+ Registry setup
+ </h3>
+
+ <code-instruction-stub
+ copytext="Copy npm setup command"
+ instruction="echo @gitlab-org:registry=npmPath/ >> .npmrc"
+ label=""
+ trackingaction="copy_npm_setup_command"
+ trackinglabel="code_instruction"
+ />
+
+ <gl-sprintf-stub
+ message="You may also need to setup authentication using an auth token. %{linkStart}See the documentation%{linkEnd} to find out more."
+ />
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/nuget_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/nuget_installation_spec.js.snap
new file mode 100644
index 00000000000..29ddd7b77ed
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/nuget_installation_spec.js.snap
@@ -0,0 +1,36 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`NugetInstallation renders all the messages 1`] = `
+<div>
+ <installation-title-stub
+ options="[object Object]"
+ packagetype="nuget"
+ />
+
+ <code-instruction-stub
+ copytext="Copy NuGet Command"
+ instruction="nuget install @gitlab-org/package-15 -Source \\"GitLab\\""
+ label="NuGet Command"
+ trackingaction="copy_nuget_install_command"
+ trackinglabel="code_instruction"
+ />
+
+ <h3
+ class="gl-font-lg"
+ >
+ Registry setup
+ </h3>
+
+ <code-instruction-stub
+ copytext="Copy NuGet Setup Command"
+ instruction="nuget source Add -Name \\"GitLab\\" -Source \\"nugetPath\\" -UserName <your_username> -Password <your_token>"
+ label="Add NuGet Source"
+ trackingaction="copy_nuget_setup_command"
+ trackinglabel="code_instruction"
+ />
+
+ <gl-sprintf-stub
+ message="For more information on the NuGet registry, %{linkStart}see the documentation%{linkEnd}."
+ />
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap
new file mode 100644
index 00000000000..45d261625b4
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap
@@ -0,0 +1,197 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`PackageTitle renders with tags 1`] = `
+<div
+ class="gl-display-flex gl-flex-direction-column"
+ data-qa-selector="package_title"
+>
+ <div
+ class="gl-display-flex gl-justify-content-space-between gl-py-3"
+ >
+ <div
+ class="gl-flex-direction-column gl-flex-grow-1"
+ >
+ <div
+ class="gl-display-flex"
+ >
+ <!---->
+
+ <div
+ class="gl-display-flex gl-flex-direction-column"
+ >
+ <h1
+ class="gl-font-size-h1 gl-mt-3 gl-mb-2"
+ data-testid="title"
+ >
+ @gitlab-org/package-15
+ </h1>
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
+ >
+ <gl-icon-stub
+ class="gl-mr-3"
+ name="eye"
+ size="16"
+ />
+
+ <span
+ data-testid="sub-header"
+ >
+ v
+ 1.0.0
+ published
+ <time-ago-tooltip-stub
+ class="gl-ml-2"
+ cssclass=""
+ time="2020-08-17T14:23:32Z"
+ tooltipplacement="top"
+ />
+ </span>
+ </div>
+ </div>
+ </div>
+
+ <div
+ class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
+ >
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-type"
+ icon="package"
+ link=""
+ size="s"
+ text="npm"
+ texttooltip=""
+ />
+ </div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-size"
+ icon="disk"
+ link=""
+ size="s"
+ text="800.00 KiB"
+ texttooltip=""
+ />
+ </div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <package-tags-stub
+ hidelabel="true"
+ tagdisplaylimit="2"
+ tags="[object Object],[object Object],[object Object]"
+ />
+ </div>
+ </div>
+ </div>
+
+ <!---->
+ </div>
+
+ <p />
+</div>
+`;
+
+exports[`PackageTitle renders without tags 1`] = `
+<div
+ class="gl-display-flex gl-flex-direction-column"
+ data-qa-selector="package_title"
+>
+ <div
+ class="gl-display-flex gl-justify-content-space-between gl-py-3"
+ >
+ <div
+ class="gl-flex-direction-column gl-flex-grow-1"
+ >
+ <div
+ class="gl-display-flex"
+ >
+ <!---->
+
+ <div
+ class="gl-display-flex gl-flex-direction-column"
+ >
+ <h1
+ class="gl-font-size-h1 gl-mt-3 gl-mb-2"
+ data-testid="title"
+ >
+ @gitlab-org/package-15
+ </h1>
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
+ >
+ <gl-icon-stub
+ class="gl-mr-3"
+ name="eye"
+ size="16"
+ />
+
+ <span
+ data-testid="sub-header"
+ >
+ v
+ 1.0.0
+ published
+ <time-ago-tooltip-stub
+ class="gl-ml-2"
+ cssclass=""
+ time="2020-08-17T14:23:32Z"
+ tooltipplacement="top"
+ />
+ </span>
+ </div>
+ </div>
+ </div>
+
+ <div
+ class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
+ >
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-type"
+ icon="package"
+ link=""
+ size="s"
+ text="npm"
+ texttooltip=""
+ />
+ </div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-size"
+ icon="disk"
+ link=""
+ size="s"
+ text="800.00 KiB"
+ texttooltip=""
+ />
+ </div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <package-tags-stub
+ hidelabel="true"
+ tagdisplaylimit="2"
+ tags="[object Object],[object Object],[object Object]"
+ />
+ </div>
+ </div>
+ </div>
+
+ <!---->
+ </div>
+
+ <p />
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
new file mode 100644
index 00000000000..158bbbc3463
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
@@ -0,0 +1,48 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`PypiInstallation renders all the messages 1`] = `
+<div>
+ <installation-title-stub
+ options="[object Object]"
+ packagetype="pypi"
+ />
+
+ <code-instruction-stub
+ copytext="Copy Pip command"
+ data-testid="pip-command"
+ instruction="pip install @gitlab-org/package-15 --extra-index-url pypiPath"
+ label="Pip Command"
+ trackingaction="copy_pip_install_command"
+ trackinglabel="code_instruction"
+ />
+
+ <h3
+ class="gl-font-lg"
+ >
+ Registry setup
+ </h3>
+
+ <p>
+ <gl-sprintf-stub
+ message="If you haven't already done so, you will need to add the below to your %{codeStart}.pypirc%{codeEnd} file."
+ />
+ </p>
+
+ <code-instruction-stub
+ copytext="Copy .pypirc content"
+ data-testid="pypi-setup-content"
+ instruction="[gitlab]
+repository = pypiSetupPath
+username = __token__
+password = <your personal access token>"
+ label=""
+ multiline="true"
+ trackingaction="copy_pypi_setup_command"
+ trackinglabel="code_instruction"
+ />
+
+ <gl-sprintf-stub
+ message="For more information on the PyPi registry, %{linkStart}see the documentation%{linkEnd}."
+ />
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/version_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/version_row_spec.js.snap
new file mode 100644
index 00000000000..8f69f943112
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/version_row_spec.js.snap
@@ -0,0 +1,101 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`VersionRow renders 1`] = `
+<div
+ class="gl-display-flex gl-flex-direction-column gl-border-b-solid gl-border-t-solid gl-border-t-1 gl-border-b-1 gl-border-t-transparent gl-border-b-gray-100"
+>
+ <div
+ class="gl-display-flex gl-align-items-center gl-py-3 gl-px-5"
+ >
+ <!---->
+
+ <div
+ class="gl-display-flex gl-xs-flex-direction-column gl-justify-content-space-between gl-align-items-stretch gl-flex-grow-1"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-xs-mb-3 gl-min-w-0 gl-flex-grow-1"
+ >
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-body gl-font-weight-bold gl-min-h-6 gl-min-w-0"
+ >
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-3 gl-min-w-0"
+ >
+ <gl-link-stub
+ class="gl-text-body gl-min-w-0"
+ href="243"
+ >
+ <span
+ class="gl-truncate"
+ title="@gitlab-org/package-15"
+ >
+ <span
+ class="gl-truncate-end"
+ >
+ @gitlab-org/package-15
+ </span>
+ </span>
+ </gl-link-stub>
+
+ <package-tags-stub
+ class="gl-ml-3"
+ hidelabel="true"
+ tagdisplaylimit="1"
+ tags="[object Object],[object Object],[object Object]"
+ />
+ </div>
+
+ <!---->
+ </div>
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-min-h-6 gl-min-w-0 gl-flex-grow-1"
+ >
+
+ 1.0.1
+
+ </div>
+ </div>
+
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-sm-align-items-flex-end gl-justify-content-space-between gl-text-gray-500 gl-flex-shrink-0"
+ >
+ <div
+ class="gl-display-flex gl-align-items-center gl-sm-text-body gl-sm-font-weight-bold gl-min-h-6"
+ >
+ <publish-method-stub
+ packageentity="[object Object]"
+ />
+ </div>
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-min-h-6"
+ >
+ Created
+ <time-ago-tooltip-stub
+ cssclass=""
+ time="2021-08-10T09:33:54Z"
+ tooltipplacement="top"
+ />
+ </div>
+ </div>
+ </div>
+
+ <!---->
+ </div>
+
+ <div
+ class="gl-display-flex"
+ >
+ <div
+ class="gl-w-7"
+ />
+
+ <!---->
+
+ <div
+ class="gl-w-9"
+ />
+ </div>
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js
new file mode 100644
index 00000000000..0504a42dfcf
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js
@@ -0,0 +1,130 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import {
+ conanMetadata,
+ mavenMetadata,
+ nugetMetadata,
+ packageData,
+} from 'jest/packages_and_registries/package_registry/mock_data';
+import component from '~/packages_and_registries/package_registry/components/details/additional_metadata.vue';
+import {
+ PACKAGE_TYPE_NUGET,
+ PACKAGE_TYPE_CONAN,
+ PACKAGE_TYPE_MAVEN,
+ PACKAGE_TYPE_NPM,
+} from '~/packages_and_registries/package_registry/constants';
+import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
+
+const mavenPackage = { packageType: PACKAGE_TYPE_MAVEN, metadata: mavenMetadata() };
+const conanPackage = { packageType: PACKAGE_TYPE_CONAN, metadata: conanMetadata() };
+const nugetPackage = { packageType: PACKAGE_TYPE_NUGET, metadata: nugetMetadata() };
+const npmPackage = { packageType: PACKAGE_TYPE_NPM, metadata: {} };
+
+describe('Package Additional Metadata', () => {
+ let wrapper;
+ const defaultProps = {
+ packageEntity: {
+ ...packageData(mavenPackage),
+ },
+ };
+
+ const mountComponent = (props) => {
+ wrapper = shallowMountExtended(component, {
+ propsData: { ...defaultProps, ...props },
+ stubs: {
+ DetailsRow,
+ GlSprintf,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findTitle = () => wrapper.findByTestId('title');
+ const findMainArea = () => wrapper.findByTestId('main');
+ const findNugetSource = () => wrapper.findByTestId('nuget-source');
+ const findNugetLicense = () => wrapper.findByTestId('nuget-license');
+ const findConanRecipe = () => wrapper.findByTestId('conan-recipe');
+ const findMavenApp = () => wrapper.findByTestId('maven-app');
+ const findMavenGroup = () => wrapper.findByTestId('maven-group');
+ const findElementLink = (container) => container.findComponent(GlLink);
+
+ it('has the correct title', () => {
+ mountComponent();
+
+ const title = findTitle();
+
+ expect(title.exists()).toBe(true);
+ expect(title.text()).toBe('Additional Metadata');
+ });
+
+ it.each`
+ packageEntity | visible | packageType
+ ${mavenPackage} | ${true} | ${PACKAGE_TYPE_MAVEN}
+ ${conanPackage} | ${true} | ${PACKAGE_TYPE_CONAN}
+ ${nugetPackage} | ${true} | ${PACKAGE_TYPE_NUGET}
+ ${npmPackage} | ${false} | ${PACKAGE_TYPE_NPM}
+ `(
+ `It is $visible that the component is visible when the package is $packageType`,
+ ({ packageEntity, visible }) => {
+ mountComponent({ packageEntity });
+
+ expect(findTitle().exists()).toBe(visible);
+ expect(findMainArea().exists()).toBe(visible);
+ },
+ );
+
+ describe('nuget metadata', () => {
+ beforeEach(() => {
+ mountComponent({ packageEntity: nugetPackage });
+ });
+
+ it.each`
+ name | finderFunction | text | link | icon
+ ${'source'} | ${findNugetSource} | ${'Source project located at projectUrl'} | ${'projectUrl'} | ${'project'}
+ ${'license'} | ${findNugetLicense} | ${'License information located at licenseUrl'} | ${'licenseUrl'} | ${'license'}
+ `('$name element', ({ finderFunction, text, link, icon }) => {
+ const element = finderFunction();
+ expect(element.exists()).toBe(true);
+ expect(element.text()).toBe(text);
+ expect(element.props('icon')).toBe(icon);
+ expect(findElementLink(element).attributes('href')).toBe(nugetPackage.metadata[link]);
+ });
+ });
+
+ describe('conan metadata', () => {
+ beforeEach(() => {
+ mountComponent({ packageEntity: conanPackage });
+ });
+
+ it.each`
+ name | finderFunction | text | icon
+ ${'recipe'} | ${findConanRecipe} | ${'Recipe: package-8/1.0.0@gitlab-org+gitlab-test/stable'} | ${'information-o'}
+ `('$name element', ({ finderFunction, text, icon }) => {
+ const element = finderFunction();
+ expect(element.exists()).toBe(true);
+ expect(element.text()).toBe(text);
+ expect(element.props('icon')).toBe(icon);
+ });
+ });
+
+ describe('maven metadata', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it.each`
+ name | finderFunction | text | icon
+ ${'app'} | ${findMavenApp} | ${'App name: appName'} | ${'information-o'}
+ ${'group'} | ${findMavenGroup} | ${'App group: appGroup'} | ${'information-o'}
+ `('$name element', ({ finderFunction, text, icon }) => {
+ const element = finderFunction();
+ expect(element.exists()).toBe(true);
+ expect(element.text()).toBe(text);
+ expect(element.props('icon')).toBe(icon);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/app_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/app_spec.js
index 97444ec108f..5119512564f 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/app_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/app_spec.js
@@ -1,35 +1,451 @@
-import { GlEmptyState } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlEmptyState, GlBadge, GlTabs, GlTab } from '@gitlab/ui';
+import { createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import AdditionalMetadata from '~/packages_and_registries/package_registry/components/details/additional_metadata.vue';
import PackagesApp from '~/packages_and_registries/package_registry/components/details/app.vue';
+import DependencyRow from '~/packages_and_registries/package_registry/components/details/dependency_row.vue';
+import InstallationCommands from '~/packages_and_registries/package_registry/components/details/installation_commands.vue';
+import PackageFiles from '~/packages_and_registries/package_registry/components/details/package_files.vue';
+import PackageHistory from '~/packages_and_registries/package_registry/components/details/package_history.vue';
+import PackageTitle from '~/packages_and_registries/package_registry/components/details/package_title.vue';
+import VersionRow from '~/packages_and_registries/package_registry/components/details/version_row.vue';
+import {
+ FETCH_PACKAGE_DETAILS_ERROR_MESSAGE,
+ DELETE_PACKAGE_ERROR_MESSAGE,
+ PACKAGE_TYPE_COMPOSER,
+ DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
+ DELETE_PACKAGE_FILE_ERROR_MESSAGE,
+ PACKAGE_TYPE_NUGET,
+} from '~/packages_and_registries/package_registry/constants';
+
+import destroyPackageMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package.mutation.graphql';
+import destroyPackageFileMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package_file.mutation.graphql';
+import getPackageDetails from '~/packages_and_registries/package_registry/graphql/queries/get_package_details.query.graphql';
+import {
+ packageDetailsQuery,
+ packageData,
+ packageVersions,
+ dependencyLinks,
+ emptyPackageDetailsQuery,
+ packageDestroyMutation,
+ packageDestroyMutationError,
+ packageFiles,
+ packageDestroyFileMutation,
+ packageDestroyFileMutationError,
+} from '../../mock_data';
+
+jest.mock('~/flash');
+useMockLocationHelper();
+
+const localVue = createLocalVue();
describe('PackagesApp', () => {
let wrapper;
+ let apolloProvider;
+
+ const provide = {
+ packageId: '111',
+ titleComponent: 'PackageTitle',
+ projectName: 'projectName',
+ canDelete: 'canDelete',
+ svgPath: 'svgPath',
+ npmPath: 'npmPath',
+ npmHelpPath: 'npmHelpPath',
+ projectListUrl: 'projectListUrl',
+ groupListUrl: 'groupListUrl',
+ };
- function createComponent() {
- wrapper = shallowMount(PackagesApp, {
- provide: {
- titleComponent: 'titleComponent',
- projectName: 'projectName',
- canDelete: 'canDelete',
- svgPath: 'svgPath',
- npmPath: 'npmPath',
- npmHelpPath: 'npmHelpPath',
- projectListUrl: 'projectListUrl',
- groupListUrl: 'groupListUrl',
+ function createComponent({
+ resolver = jest.fn().mockResolvedValue(packageDetailsQuery()),
+ mutationResolver = jest.fn().mockResolvedValue(packageDestroyMutation()),
+ fileDeleteMutationResolver = jest.fn().mockResolvedValue(packageDestroyFileMutation()),
+ } = {}) {
+ localVue.use(VueApollo);
+
+ const requestHandlers = [
+ [getPackageDetails, resolver],
+ [destroyPackageMutation, mutationResolver],
+ [destroyPackageFileMutation, fileDeleteMutationResolver],
+ ];
+ apolloProvider = createMockApollo(requestHandlers);
+
+ wrapper = shallowMountExtended(PackagesApp, {
+ localVue,
+ apolloProvider,
+ provide,
+ stubs: {
+ PackageTitle,
+ GlModal: {
+ template: '<div></div>',
+ methods: {
+ show: jest.fn(),
+ },
+ },
+ GlTabs,
+ GlTab,
},
});
}
- const emptyState = () => wrapper.findComponent(GlEmptyState);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findPackageTitle = () => wrapper.findComponent(PackageTitle);
+ const findPackageHistory = () => wrapper.findComponent(PackageHistory);
+ const findAdditionalMetadata = () => wrapper.findComponent(AdditionalMetadata);
+ const findInstallationCommands = () => wrapper.findComponent(InstallationCommands);
+ const findDeleteModal = () => wrapper.findByTestId('delete-modal');
+ const findDeleteButton = () => wrapper.findByTestId('delete-package');
+ const findPackageFiles = () => wrapper.findComponent(PackageFiles);
+ const findDeleteFileModal = () => wrapper.findByTestId('delete-file-modal');
+ const findVersionRows = () => wrapper.findAllComponents(VersionRow);
+ const noVersionsMessage = () => wrapper.findByTestId('no-versions-message');
+ const findDependenciesCountBadge = () => wrapper.findComponent(GlBadge);
+ const findNoDependenciesMessage = () => wrapper.findByTestId('no-dependencies-message');
+ const findDependencyRows = () => wrapper.findAllComponents(DependencyRow);
afterEach(() => {
wrapper.destroy();
});
- it('renders an empty state component', () => {
+ it('renders an empty state component', async () => {
+ createComponent({ resolver: jest.fn().mockResolvedValue(emptyPackageDetailsQuery) });
+
+ await waitForPromises();
+
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('renders the app and displays the package title', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findPackageTitle().exists()).toBe(true);
+ expect(findPackageTitle().props()).toMatchObject({
+ packageEntity: expect.objectContaining(packageData()),
+ });
+ });
+
+ it('emits an error message if the load fails', async () => {
+ createComponent({ resolver: jest.fn().mockRejectedValue() });
+
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: FETCH_PACKAGE_DETAILS_ERROR_MESSAGE,
+ }),
+ );
+ });
+
+ it('renders history and has the right props', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findPackageHistory().exists()).toBe(true);
+ expect(findPackageHistory().props()).toMatchObject({
+ packageEntity: expect.objectContaining(packageData()),
+ projectName: provide.projectName,
+ });
+ });
+
+ it('renders additional metadata and has the right props', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findAdditionalMetadata().exists()).toBe(true);
+ expect(findAdditionalMetadata().props()).toMatchObject({
+ packageEntity: expect.objectContaining(packageData()),
+ });
+ });
+
+ it('renders installation commands and has the right props', async () => {
createComponent();
- expect(emptyState().exists()).toBe(true);
+ await waitForPromises();
+
+ expect(findInstallationCommands().exists()).toBe(true);
+ expect(findInstallationCommands().props()).toMatchObject({
+ packageEntity: expect.objectContaining(packageData()),
+ });
+ });
+
+ describe('delete package', () => {
+ const originalReferrer = document.referrer;
+ const setReferrer = (value = provide.projectName) => {
+ Object.defineProperty(document, 'referrer', {
+ value,
+ configurable: true,
+ });
+ };
+
+ const performDeletePackage = async () => {
+ await findDeleteButton().trigger('click');
+
+ findDeleteModal().vm.$emit('primary');
+
+ await waitForPromises();
+ };
+
+ afterEach(() => {
+ Object.defineProperty(document, 'referrer', {
+ value: originalReferrer,
+ configurable: true,
+ });
+ });
+
+ it('shows the delete confirmation modal when delete is clicked', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ await findDeleteButton().trigger('click');
+
+ expect(findDeleteModal().exists()).toBe(true);
+ });
+
+ describe('successful request', () => {
+ it('when referrer contains project name calls window.replace with project url', async () => {
+ setReferrer();
+
+ createComponent();
+
+ await waitForPromises();
+
+ await performDeletePackage();
+
+ expect(window.location.replace).toHaveBeenCalledWith(
+ 'projectListUrl?showSuccessDeleteAlert=true',
+ );
+ });
+
+ it('when referrer does not contain project name calls window.replace with group url', async () => {
+ setReferrer('baz');
+
+ createComponent();
+
+ await waitForPromises();
+
+ await performDeletePackage();
+
+ expect(window.location.replace).toHaveBeenCalledWith(
+ 'groupListUrl?showSuccessDeleteAlert=true',
+ );
+ });
+ });
+
+ describe('request failure', () => {
+ it('on global failure it displays an alert', async () => {
+ createComponent({ mutationResolver: jest.fn().mockRejectedValue() });
+
+ await waitForPromises();
+
+ await performDeletePackage();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_ERROR_MESSAGE,
+ }),
+ );
+ });
+
+ it('on payload with error it displays an alert', async () => {
+ createComponent({
+ mutationResolver: jest.fn().mockResolvedValue(packageDestroyMutationError()),
+ });
+
+ await waitForPromises();
+
+ await performDeletePackage();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_ERROR_MESSAGE,
+ }),
+ );
+ });
+ });
+ });
+
+ describe('package files', () => {
+ it('renders the package files component and has the right props', async () => {
+ const expectedFile = { ...packageFiles()[0] };
+ // eslint-disable-next-line no-underscore-dangle
+ delete expectedFile.__typename;
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findPackageFiles().exists()).toBe(true);
+
+ expect(findPackageFiles().props('packageFiles')[0]).toMatchObject(expectedFile);
+ });
+
+ it('does not render the package files table when the package is composer', async () => {
+ createComponent({
+ resolver: jest
+ .fn()
+ .mockResolvedValue(packageDetailsQuery({ packageType: PACKAGE_TYPE_COMPOSER })),
+ });
+
+ await waitForPromises();
+
+ expect(findPackageFiles().exists()).toBe(false);
+ });
+
+ describe('deleting a file', () => {
+ const [fileToDelete] = packageFiles();
+
+ const doDeleteFile = () => {
+ findPackageFiles().vm.$emit('delete-file', fileToDelete);
+
+ findDeleteFileModal().vm.$emit('primary');
+
+ return waitForPromises();
+ };
+
+ it('opens a confirmation modal', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ findPackageFiles().vm.$emit('delete-file', fileToDelete);
+
+ await nextTick();
+
+ expect(findDeleteFileModal().exists()).toBe(true);
+ });
+
+ it('confirming on the modal deletes the file and shows a success message', async () => {
+ const resolver = jest.fn().mockResolvedValue(packageDetailsQuery());
+ createComponent({ resolver });
+
+ await waitForPromises();
+
+ await doDeleteFile();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
+ }),
+ );
+ // we are re-fetching the package details, so we expect the resolver to have been called twice
+ expect(resolver).toHaveBeenCalledTimes(2);
+ });
+
+ describe('errors', () => {
+ it('shows an error when the mutation request fails', async () => {
+ createComponent({ fileDeleteMutationResolver: jest.fn().mockRejectedValue() });
+ await waitForPromises();
+
+ await doDeleteFile();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILE_ERROR_MESSAGE,
+ }),
+ );
+ });
+
+ it('shows an error when the mutation request returns an error payload', async () => {
+ createComponent({
+ fileDeleteMutationResolver: jest
+ .fn()
+ .mockResolvedValue(packageDestroyFileMutationError()),
+ });
+ await waitForPromises();
+
+ await doDeleteFile();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILE_ERROR_MESSAGE,
+ }),
+ );
+ });
+ });
+ });
+ });
+
+ describe('versions', () => {
+ it('displays the correct version count when the package has versions', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findVersionRows()).toHaveLength(packageVersions().length);
+ });
+
+ it('binds the correct props', async () => {
+ const [versionPackage] = packageVersions();
+ // eslint-disable-next-line no-underscore-dangle
+ delete versionPackage.__typename;
+ delete versionPackage.tags;
+
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findVersionRows().at(0).props()).toMatchObject({
+ packageEntity: expect.objectContaining(versionPackage),
+ });
+ });
+
+ it('displays the no versions message when there are none', async () => {
+ createComponent({
+ resolver: jest.fn().mockResolvedValue(packageDetailsQuery({ versions: { nodes: [] } })),
+ });
+
+ await waitForPromises();
+
+ expect(noVersionsMessage().exists()).toBe(true);
+ });
+ });
+ describe('dependency links', () => {
+    it('does not show the dependency links for a non nuget package', () => {
+ createComponent();
+
+ expect(findDependenciesCountBadge().exists()).toBe(false);
+ });
+
+    it('shows the dependencies tab with 0 count when a nuget package has no dependencies', async () => {
+ createComponent({
+ resolver: jest.fn().mockResolvedValue(
+ packageDetailsQuery({
+ packageType: PACKAGE_TYPE_NUGET,
+ dependencyLinks: { nodes: [] },
+ }),
+ ),
+ });
+
+ await waitForPromises();
+
+ expect(findDependenciesCountBadge().exists()).toBe(true);
+ expect(findDependenciesCountBadge().text()).toBe('0');
+ expect(findNoDependenciesMessage().exists()).toBe(true);
+ });
+
+ it('renders the correct number of dependency rows for a nuget package', async () => {
+ createComponent({
+ resolver: jest.fn().mockResolvedValue(
+ packageDetailsQuery({
+ packageType: PACKAGE_TYPE_NUGET,
+ }),
+ ),
+ });
+ await waitForPromises();
+
+ expect(findDependenciesCountBadge().exists()).toBe(true);
+ expect(findDependenciesCountBadge().text()).toBe(dependencyLinks().length.toString());
+ expect(findDependencyRows()).toHaveLength(dependencyLinks().length);
+ });
});
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/composer_installation_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/composer_installation_spec.js
new file mode 100644
index 00000000000..aedf20e873a
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/composer_installation_spec.js
@@ -0,0 +1,118 @@
+import { GlSprintf, GlLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { packageData } from 'jest/packages_and_registries/package_registry/mock_data';
+import ComposerInstallation from '~/packages_and_registries/package_registry/components/details/composer_installation.vue';
+import InstallationTitle from '~/packages_and_registries/package_registry/components/details/installation_title.vue';
+import {
+ TRACKING_ACTION_COPY_COMPOSER_REGISTRY_INCLUDE_COMMAND,
+ TRACKING_ACTION_COPY_COMPOSER_PACKAGE_INCLUDE_COMMAND,
+ PACKAGE_TYPE_COMPOSER,
+} from '~/packages_and_registries/package_registry/constants';
+
+const packageEntity = { ...packageData(), packageType: PACKAGE_TYPE_COMPOSER };
+
+describe('ComposerInstallation', () => {
+ let wrapper;
+
+ const findRootNode = () => wrapper.findByTestId('root-node');
+ const findRegistryInclude = () => wrapper.findByTestId('registry-include');
+ const findPackageInclude = () => wrapper.findByTestId('package-include');
+ const findHelpText = () => wrapper.findByTestId('help-text');
+ const findHelpLink = () => wrapper.findComponent(GlLink);
+ const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
+
+ function createComponent(groupListUrl = 'groupListUrl') {
+ wrapper = shallowMountExtended(ComposerInstallation, {
+ provide: {
+ composerHelpPath: 'composerHelpPath',
+ composerConfigRepositoryName: 'composerConfigRepositoryName',
+ composerPath: 'composerPath',
+ groupListUrl,
+ },
+ propsData: { packageEntity },
+ stubs: {
+ GlSprintf,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('install command switch', () => {
+ it('has the installation title component', () => {
+ createComponent();
+
+ expect(findInstallationTitle().exists()).toBe(true);
+ expect(findInstallationTitle().props()).toMatchObject({
+ packageType: 'composer',
+ options: [{ value: 'composer', label: 'Show Composer commands' }],
+ });
+ });
+ });
+
+ describe('registry include command', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('uses code_instructions', () => {
+ const registryIncludeCommand = findRegistryInclude();
+ expect(registryIncludeCommand.exists()).toBe(true);
+ expect(registryIncludeCommand.props()).toMatchObject({
+ instruction: `composer config repositories.composerConfigRepositoryName '{"type": "composer", "url": "composerPath"}'`,
+ copyText: 'Copy registry include',
+ trackingAction: TRACKING_ACTION_COPY_COMPOSER_REGISTRY_INCLUDE_COMMAND,
+ });
+ });
+
+ it('has the correct title', () => {
+ expect(findRegistryInclude().props('label')).toBe('Add composer registry');
+ });
+ });
+
+ describe('package include command', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('uses code_instructions', () => {
+      const packageIncludeCommand = findPackageInclude();
+      expect(packageIncludeCommand.exists()).toBe(true);
+      expect(packageIncludeCommand.props()).toMatchObject({
+ instruction: 'composer req @gitlab-org/package-15:1.0.0',
+ copyText: 'Copy require package include',
+ trackingAction: TRACKING_ACTION_COPY_COMPOSER_PACKAGE_INCLUDE_COMMAND,
+ });
+ });
+
+ it('has the correct title', () => {
+ expect(findPackageInclude().props('label')).toBe('Install package version');
+ });
+
+ it('has the correct help text', () => {
+ expect(findHelpText().text()).toBe(
+ 'For more information on Composer packages in GitLab, see the documentation.',
+ );
+ expect(findHelpLink().attributes()).toMatchObject({
+ href: 'composerHelpPath',
+ target: '_blank',
+ });
+ });
+ });
+
+ describe('root node', () => {
+ it('is normally rendered', () => {
+ createComponent();
+
+ expect(findRootNode().exists()).toBe(true);
+ });
+
+ it('is not rendered when the group does not exist', () => {
+ createComponent('');
+
+ expect(findRootNode().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/conan_installation_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/conan_installation_spec.js
new file mode 100644
index 00000000000..6b642cc21b7
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/conan_installation_spec.js
@@ -0,0 +1,65 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { packageData } from 'jest/packages_and_registries/package_registry/mock_data';
+import ConanInstallation from '~/packages_and_registries/package_registry/components/details/conan_installation.vue';
+import InstallationTitle from '~/packages_and_registries/package_registry/components/details/installation_title.vue';
+import { PACKAGE_TYPE_CONAN } from '~/packages_and_registries/package_registry/constants';
+import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
+
+const packageEntity = { ...packageData(), packageType: PACKAGE_TYPE_CONAN };
+
+describe('ConanInstallation', () => {
+ let wrapper;
+
+ const findCodeInstructions = () => wrapper.findAllComponents(CodeInstructions);
+ const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
+
+ function createComponent() {
+ wrapper = shallowMountExtended(ConanInstallation, {
+ provide: {
+ conanHelpPath: 'conanHelpPath',
+ conanPath: 'conanPath',
+ },
+ propsData: {
+ packageEntity,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('install command switch', () => {
+ it('has the installation title component', () => {
+ expect(findInstallationTitle().exists()).toBe(true);
+ expect(findInstallationTitle().props()).toMatchObject({
+ packageType: 'conan',
+ options: [{ value: 'conan', label: 'Show Conan commands' }],
+ });
+ });
+ });
+
+ describe('installation commands', () => {
+ it('renders the correct command', () => {
+ expect(findCodeInstructions().at(0).props('instruction')).toBe(
+ 'conan install @gitlab-org/package-15 --remote=gitlab',
+ );
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct command', () => {
+ expect(findCodeInstructions().at(1).props('instruction')).toBe(
+ 'conan remote add gitlab conanPath',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/dependency_row_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/dependency_row_spec.js
new file mode 100644
index 00000000000..9aed5b90c73
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/dependency_row_spec.js
@@ -0,0 +1,69 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import DependencyRow from '~/packages_and_registries/package_registry/components/details/dependency_row.vue';
+import { dependencyLinks } from '../../mock_data';
+
+describe('DependencyRow', () => {
+ let wrapper;
+
+ const [fullDependencyLink] = dependencyLinks();
+ const { dependency, metadata } = fullDependencyLink;
+
+ function createComponent(dependencyLink = fullDependencyLink) {
+ wrapper = shallowMountExtended(DependencyRow, {
+ propsData: {
+ dependencyLink,
+ },
+ });
+ }
+
+ const dependencyVersion = () => wrapper.findByTestId('version-pattern');
+ const dependencyFramework = () => wrapper.findByTestId('target-framework');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('renders', () => {
+ it('full dependency', () => {
+ createComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+
+ describe('version', () => {
+ it('does not render any version information when not supplied', () => {
+ createComponent({
+ ...fullDependencyLink,
+ dependency: { ...dependency, versionPattern: undefined },
+ });
+
+ expect(dependencyVersion().exists()).toBe(false);
+ });
+
+ it('does render version info when it exists', () => {
+ createComponent();
+
+ expect(dependencyVersion().exists()).toBe(true);
+ expect(dependencyVersion().text()).toBe(dependency.versionPattern);
+ });
+ });
+
+ describe('target framework', () => {
+ it('does not render any framework information when not supplied', () => {
+ createComponent({
+ ...fullDependencyLink,
+ metadata: { ...metadata, targetFramework: undefined },
+ });
+
+ expect(dependencyFramework().exists()).toBe(false);
+ });
+
+ it('does render framework info when it exists', () => {
+ createComponent();
+
+ expect(dependencyFramework().exists()).toBe(true);
+ expect(dependencyFramework().text()).toBe(`(${metadata.targetFramework})`);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/file_sha_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/file_sha_spec.js
new file mode 100644
index 00000000000..ebfbbe5b864
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/file_sha_spec.js
@@ -0,0 +1,33 @@
+import { shallowMount } from '@vue/test-utils';
+
+import FileSha from '~/packages_and_registries/package_registry/components/details/file_sha.vue';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
+
+describe('FileSha', () => {
+ let wrapper;
+
+ const defaultProps = { sha: 'foo', title: 'bar' };
+
+ function createComponent() {
+ wrapper = shallowMount(FileSha, {
+ propsData: {
+ ...defaultProps,
+ },
+ stubs: {
+ ClipboardButton,
+ DetailsRow,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders', () => {
+ createComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/installation_title_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/installation_title_spec.js
new file mode 100644
index 00000000000..5fe795f768e
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/installation_title_spec.js
@@ -0,0 +1,58 @@
+import { shallowMount } from '@vue/test-utils';
+
+import InstallationTitle from '~/packages_and_registries/package_registry/components/details/installation_title.vue';
+import PersistedDropdownSelection from '~/vue_shared/components/registry/persisted_dropdown_selection.vue';
+
+describe('InstallationTitle', () => {
+ let wrapper;
+
+ const defaultProps = { packageType: 'foo', options: [{ value: 'foo', label: 'bar' }] };
+
+ const findPersistedDropdownSelection = () => wrapper.findComponent(PersistedDropdownSelection);
+ const findTitle = () => wrapper.find('h3');
+
+ function createComponent({ props = {} } = {}) {
+ wrapper = shallowMount(InstallationTitle, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('has a title', () => {
+ createComponent();
+
+ expect(findTitle().exists()).toBe(true);
+ expect(findTitle().text()).toBe('Installation');
+ });
+
+ describe('persisted dropdown selection', () => {
+ it('exists', () => {
+ createComponent();
+
+ expect(findPersistedDropdownSelection().exists()).toBe(true);
+ });
+
+ it('has the correct props', () => {
+ createComponent();
+
+ expect(findPersistedDropdownSelection().props()).toMatchObject({
+ storageKey: 'package_foo_installation_instructions',
+ options: defaultProps.options,
+ });
+ });
+
+ it('on change event emits a change event', () => {
+ createComponent();
+
+ findPersistedDropdownSelection().vm.$emit('change', 'baz');
+
+ expect(wrapper.emitted('change')).toEqual([['baz']]);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/installations_commands_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/installations_commands_spec.js
new file mode 100644
index 00000000000..b24946c8638
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/installations_commands_spec.js
@@ -0,0 +1,64 @@
+import { shallowMount } from '@vue/test-utils';
+import { packageData } from 'jest/packages_and_registries/package_registry/mock_data';
+import ComposerInstallation from '~/packages_and_registries/package_registry/components/details/composer_installation.vue';
+import ConanInstallation from '~/packages_and_registries/package_registry/components/details/conan_installation.vue';
+import InstallationCommands from '~/packages_and_registries/package_registry/components/details/installation_commands.vue';
+
+import MavenInstallation from '~/packages_and_registries/package_registry/components/details/maven_installation.vue';
+import NpmInstallation from '~/packages_and_registries/package_registry/components/details/npm_installation.vue';
+import NugetInstallation from '~/packages_and_registries/package_registry/components/details/nuget_installation.vue';
+import PypiInstallation from '~/packages_and_registries/package_registry/components/details/pypi_installation.vue';
+import {
+ PACKAGE_TYPE_CONAN,
+ PACKAGE_TYPE_MAVEN,
+ PACKAGE_TYPE_NPM,
+ PACKAGE_TYPE_NUGET,
+ PACKAGE_TYPE_PYPI,
+ PACKAGE_TYPE_COMPOSER,
+} from '~/packages_and_registries/package_registry/constants';
+
+const conanPackage = { ...packageData(), packageType: PACKAGE_TYPE_CONAN };
+const mavenPackage = { ...packageData(), packageType: PACKAGE_TYPE_MAVEN };
+const npmPackage = { ...packageData(), packageType: PACKAGE_TYPE_NPM };
+const nugetPackage = { ...packageData(), packageType: PACKAGE_TYPE_NUGET };
+const pypiPackage = { ...packageData(), packageType: PACKAGE_TYPE_PYPI };
+const composerPackage = { ...packageData(), packageType: PACKAGE_TYPE_COMPOSER };
+
+describe('InstallationCommands', () => {
+ let wrapper;
+
+ function createComponent(propsData) {
+ wrapper = shallowMount(InstallationCommands, {
+ propsData,
+ });
+ }
+
+ const npmInstallation = () => wrapper.find(NpmInstallation);
+ const mavenInstallation = () => wrapper.find(MavenInstallation);
+ const conanInstallation = () => wrapper.find(ConanInstallation);
+ const nugetInstallation = () => wrapper.find(NugetInstallation);
+ const pypiInstallation = () => wrapper.find(PypiInstallation);
+ const composerInstallation = () => wrapper.find(ComposerInstallation);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('installation instructions', () => {
+ describe.each`
+ packageEntity | selector
+ ${conanPackage} | ${conanInstallation}
+ ${mavenPackage} | ${mavenInstallation}
+ ${npmPackage} | ${npmInstallation}
+ ${nugetPackage} | ${nugetInstallation}
+ ${pypiPackage} | ${pypiInstallation}
+ ${composerPackage} | ${composerInstallation}
+ `('renders', ({ packageEntity, selector }) => {
+ it(`${packageEntity.packageType} instructions exist`, () => {
+ createComponent({ packageEntity });
+
+ expect(selector()).toExist();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/maven_installation_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/maven_installation_spec.js
new file mode 100644
index 00000000000..eed7e903833
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/maven_installation_spec.js
@@ -0,0 +1,213 @@
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import {
+ packageData,
+ mavenMetadata,
+} from 'jest/packages_and_registries/package_registry/mock_data';
+import InstallationTitle from '~/packages_and_registries/package_registry/components/details/installation_title.vue';
+import MavenInstallation from '~/packages_and_registries/package_registry/components/details/maven_installation.vue';
+import {
+ TRACKING_ACTION_COPY_MAVEN_XML,
+ TRACKING_ACTION_COPY_MAVEN_COMMAND,
+ TRACKING_ACTION_COPY_MAVEN_SETUP,
+ TRACKING_ACTION_COPY_GRADLE_INSTALL_COMMAND,
+ TRACKING_ACTION_COPY_GRADLE_ADD_TO_SOURCE_COMMAND,
+ TRACKING_ACTION_COPY_KOTLIN_INSTALL_COMMAND,
+ TRACKING_ACTION_COPY_KOTLIN_ADD_TO_SOURCE_COMMAND,
+ PACKAGE_TYPE_MAVEN,
+} from '~/packages_and_registries/package_registry/constants';
+import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
+
+describe('MavenInstallation', () => {
+ let wrapper;
+
+ const packageEntity = {
+ ...packageData(),
+ packageType: PACKAGE_TYPE_MAVEN,
+ metadata: mavenMetadata(),
+ };
+
+ const mavenHelpPath = 'mavenHelpPath';
+ const mavenPath = 'mavenPath';
+
+ const xmlCodeBlock = `<dependency>
+ <groupId>appGroup</groupId>
+ <artifactId>appName</artifactId>
+ <version>appVersion</version>
+</dependency>`;
+ const mavenCommandStr = 'mvn dependency:get -Dartifact=appGroup:appName:appVersion';
+ const mavenSetupXml = `<repositories>
+ <repository>
+ <id>gitlab-maven</id>
+ <url>${mavenPath}</url>
+ </repository>
+</repositories>
+
+<distributionManagement>
+ <repository>
+ <id>gitlab-maven</id>
+ <url>${mavenPath}</url>
+ </repository>
+
+ <snapshotRepository>
+ <id>gitlab-maven</id>
+ <url>${mavenPath}</url>
+ </snapshotRepository>
+</distributionManagement>`;
+ const gradleGroovyInstallCommandText = `implementation 'appGroup:appName:appVersion'`;
+ const gradleGroovyAddSourceCommandText = `maven {
+ url '${mavenPath}'
+}`;
+ const gradleKotlinInstallCommandText = `implementation("appGroup:appName:appVersion")`;
+ const gradleKotlinAddSourceCommandText = `maven("${mavenPath}")`;
+
+ const findCodeInstructions = () => wrapper.findAllComponents(CodeInstructions);
+ const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
+
+ function createComponent({ data = {} } = {}) {
+ wrapper = shallowMountExtended(MavenInstallation, {
+ provide: {
+ mavenHelpPath,
+ mavenPath,
+ },
+ propsData: {
+ packageEntity,
+ },
+ data() {
+ return data;
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('install command switch', () => {
+ it('has the installation title component', () => {
+ createComponent();
+
+ expect(findInstallationTitle().exists()).toBe(true);
+ expect(findInstallationTitle().props()).toMatchObject({
+ packageType: 'maven',
+ options: [
+ { value: 'maven', label: 'Maven XML' },
+ { value: 'groovy', label: 'Gradle Groovy DSL' },
+ { value: 'kotlin', label: 'Gradle Kotlin DSL' },
+ ],
+ });
+ });
+
+ it('on change event updates the instructions to show', async () => {
+ createComponent();
+
+ expect(findCodeInstructions().at(0).props('instruction')).toBe(xmlCodeBlock);
+ findInstallationTitle().vm.$emit('change', 'groovy');
+
+ await nextTick();
+
+ expect(findCodeInstructions().at(0).props('instruction')).toBe(
+ gradleGroovyInstallCommandText,
+ );
+ });
+ });
+
+ describe('maven', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('installation commands', () => {
+ it('renders the correct xml block', () => {
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
+ instruction: xmlCodeBlock,
+ multiline: true,
+ trackingAction: TRACKING_ACTION_COPY_MAVEN_XML,
+ });
+ });
+
+ it('renders the correct maven command', () => {
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: mavenCommandStr,
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_MAVEN_COMMAND,
+ });
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct xml block', () => {
+ expect(findCodeInstructions().at(2).props()).toMatchObject({
+ instruction: mavenSetupXml,
+ multiline: true,
+ trackingAction: TRACKING_ACTION_COPY_MAVEN_SETUP,
+ });
+ });
+ });
+ });
+
+ describe('groovy', () => {
+ beforeEach(() => {
+ createComponent({ data: { instructionType: 'groovy' } });
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('installation commands', () => {
+ it('renders the gradle install command', () => {
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
+ instruction: gradleGroovyInstallCommandText,
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_GRADLE_INSTALL_COMMAND,
+ });
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct gradle command', () => {
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: gradleGroovyAddSourceCommandText,
+ multiline: true,
+ trackingAction: TRACKING_ACTION_COPY_GRADLE_ADD_TO_SOURCE_COMMAND,
+ });
+ });
+ });
+ });
+
+ describe('kotlin', () => {
+ beforeEach(() => {
+ createComponent({ data: { instructionType: 'kotlin' } });
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('installation commands', () => {
+ it('renders the gradle install command', () => {
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
+ instruction: gradleKotlinInstallCommandText,
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_KOTLIN_INSTALL_COMMAND,
+ });
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct gradle command', () => {
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: gradleKotlinAddSourceCommandText,
+ multiline: true,
+ trackingAction: TRACKING_ACTION_COPY_KOTLIN_ADD_TO_SOURCE_COMMAND,
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/npm_installation_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/npm_installation_spec.js
new file mode 100644
index 00000000000..083c6858ad0
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/npm_installation_spec.js
@@ -0,0 +1,122 @@
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import { packageData } from 'jest/packages_and_registries/package_registry/mock_data';
+import InstallationTitle from '~/packages_and_registries/package_registry/components/details/installation_title.vue';
+import NpmInstallation from '~/packages_and_registries/package_registry/components/details/npm_installation.vue';
+import {
+ TRACKING_ACTION_COPY_NPM_INSTALL_COMMAND,
+ TRACKING_ACTION_COPY_NPM_SETUP_COMMAND,
+ TRACKING_ACTION_COPY_YARN_INSTALL_COMMAND,
+ TRACKING_ACTION_COPY_YARN_SETUP_COMMAND,
+ PACKAGE_TYPE_NPM,
+ NPM_PACKAGE_MANAGER,
+ YARN_PACKAGE_MANAGER,
+} from '~/packages_and_registries/package_registry/constants';
+import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
+
+const packageEntity = { ...packageData(), packageType: PACKAGE_TYPE_NPM };
+
+describe('NpmInstallation', () => {
+ let wrapper;
+
+ const npmInstallationCommandLabel = 'npm i @gitlab-org/package-15';
+ const yarnInstallationCommandLabel = 'yarn add @gitlab-org/package-15';
+
+ const findCodeInstructions = () => wrapper.findAllComponents(CodeInstructions);
+ const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
+
+ function createComponent({ data = {} } = {}) {
+ wrapper = shallowMountExtended(NpmInstallation, {
+ provide: {
+ npmHelpPath: 'npmHelpPath',
+ npmPath: 'npmPath',
+ },
+ propsData: {
+ packageEntity,
+ },
+ data() {
+ return data;
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('install command switch', () => {
+ it('has the installation title component', () => {
+ expect(findInstallationTitle().exists()).toBe(true);
+ expect(findInstallationTitle().props()).toMatchObject({
+ packageType: NPM_PACKAGE_MANAGER,
+ options: [
+ { value: NPM_PACKAGE_MANAGER, label: 'Show NPM commands' },
+ { value: YARN_PACKAGE_MANAGER, label: 'Show Yarn commands' },
+ ],
+ });
+ });
+
+ it('on change event updates the instructions to show', async () => {
+ createComponent();
+
+ expect(findCodeInstructions().at(0).props('instruction')).toBe(npmInstallationCommandLabel);
+ findInstallationTitle().vm.$emit('change', YARN_PACKAGE_MANAGER);
+
+ await nextTick();
+
+ expect(findCodeInstructions().at(0).props('instruction')).toBe(yarnInstallationCommandLabel);
+ });
+ });
+
+ describe('npm', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+ it('renders the correct installation command', () => {
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
+ instruction: npmInstallationCommandLabel,
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_NPM_INSTALL_COMMAND,
+ });
+ });
+
+ it('renders the correct setup command', () => {
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: 'echo @gitlab-org:registry=npmPath/ >> .npmrc',
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_NPM_SETUP_COMMAND,
+ });
+ });
+ });
+
+ describe('yarn', () => {
+ beforeEach(() => {
+ createComponent({ data: { instructionType: YARN_PACKAGE_MANAGER } });
+ });
+
+ it('renders the correct setup command', () => {
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
+ instruction: yarnInstallationCommandLabel,
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_YARN_INSTALL_COMMAND,
+ });
+ });
+
+ it('renders the correct registry command', () => {
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: 'echo \\"@gitlab-org:registry\\" \\"npmPath/\\" >> .yarnrc',
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_YARN_SETUP_COMMAND,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/nuget_installation_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/nuget_installation_spec.js
new file mode 100644
index 00000000000..c48a3f07299
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/nuget_installation_spec.js
@@ -0,0 +1,75 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { packageData } from 'jest/packages_and_registries/package_registry/mock_data';
+import InstallationTitle from '~/packages_and_registries/package_registry/components/details/installation_title.vue';
+import NugetInstallation from '~/packages_and_registries/package_registry/components/details/nuget_installation.vue';
+import {
+ TRACKING_ACTION_COPY_NUGET_INSTALL_COMMAND,
+ TRACKING_ACTION_COPY_NUGET_SETUP_COMMAND,
+ PACKAGE_TYPE_NUGET,
+} from '~/packages_and_registries/package_registry/constants';
+import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
+
+const packageEntity = { ...packageData(), packageType: PACKAGE_TYPE_NUGET };
+
+describe('NugetInstallation', () => {
+ let wrapper;
+
+ const nugetInstallationCommandStr = 'nuget install @gitlab-org/package-15 -Source "GitLab"';
+ const nugetSetupCommandStr =
+ 'nuget source Add -Name "GitLab" -Source "nugetPath" -UserName <your_username> -Password <your_token>';
+
+ const findCodeInstructions = () => wrapper.findAllComponents(CodeInstructions);
+ const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
+
+ function createComponent() {
+ wrapper = shallowMountExtended(NugetInstallation, {
+ provide: {
+ nugetHelpPath: 'nugetHelpPath',
+ nugetPath: 'nugetPath',
+ },
+ propsData: {
+ packageEntity,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('install command switch', () => {
+ it('has the installation title component', () => {
+ expect(findInstallationTitle().exists()).toBe(true);
+ expect(findInstallationTitle().props()).toMatchObject({
+ packageType: 'nuget',
+ options: [{ value: 'nuget', label: 'Show Nuget commands' }],
+ });
+ });
+ });
+
+ describe('installation commands', () => {
+ it('renders the correct command', () => {
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
+ instruction: nugetInstallationCommandStr,
+ trackingAction: TRACKING_ACTION_COPY_NUGET_INSTALL_COMMAND,
+ });
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct command', () => {
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: nugetSetupCommandStr,
+ trackingAction: TRACKING_ACTION_COPY_NUGET_SETUP_COMMAND,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
new file mode 100644
index 00000000000..042b2026199
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
@@ -0,0 +1,272 @@
+import { GlDropdown, GlButton } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import stubChildren from 'helpers/stub_children';
+import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { packageFiles as packageFilesMock } from 'jest/packages_and_registries/package_registry/mock_data';
+import PackageFiles from '~/packages_and_registries/package_registry/components/details/package_files.vue';
+import FileIcon from '~/vue_shared/components/file_icon.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+describe('Package Files', () => {
+ let wrapper;
+
+ const findAllRows = () => wrapper.findAllByTestId('file-row');
+ const findFirstRow = () => extendedWrapper(findAllRows().at(0));
+ const findSecondRow = () => extendedWrapper(findAllRows().at(1));
+ const findFirstRowDownloadLink = () => findFirstRow().findByTestId('download-link');
+ const findFirstRowCommitLink = () => findFirstRow().findByTestId('commit-link');
+ const findSecondRowCommitLink = () => findSecondRow().findByTestId('commit-link');
+ const findFirstRowFileIcon = () => findFirstRow().findComponent(FileIcon);
+ const findFirstRowCreatedAt = () => findFirstRow().findComponent(TimeAgoTooltip);
+ const findFirstActionMenu = () => extendedWrapper(findFirstRow().findComponent(GlDropdown));
+ const findActionMenuDelete = () => findFirstActionMenu().findByTestId('delete-file');
+ const findFirstToggleDetailsButton = () => findFirstRow().findComponent(GlButton);
+ const findFirstRowShaComponent = (id) => wrapper.findByTestId(id);
+
+ const files = packageFilesMock();
+ const [file] = files;
+
+ const createComponent = ({ packageFiles = [file], canDelete = true } = {}) => {
+ wrapper = mountExtended(PackageFiles, {
+ provide: { canDelete },
+ propsData: {
+ packageFiles,
+ },
+ stubs: {
+ ...stubChildren(PackageFiles),
+ GlTable: false,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('rows', () => {
+ it('renders a single file for an npm package', () => {
+ createComponent();
+
+ expect(findAllRows()).toHaveLength(1);
+ });
+
+ it('renders multiple files for a package that contains more than one file', () => {
+ createComponent({ packageFiles: files });
+
+ expect(findAllRows()).toHaveLength(2);
+ });
+ });
+
+ describe('link', () => {
+ it('exists', () => {
+ createComponent();
+
+ expect(findFirstRowDownloadLink().exists()).toBe(true);
+ });
+
+ it('has the correct attrs bound', () => {
+ createComponent();
+
+ expect(findFirstRowDownloadLink().attributes('href')).toBe(file.downloadPath);
+ });
+
+ it('emits "download-file" event on click', () => {
+ createComponent();
+
+ findFirstRowDownloadLink().vm.$emit('click');
+
+ expect(wrapper.emitted('download-file')).toEqual([[]]);
+ });
+ });
+
+ describe('file-icon', () => {
+ it('exists', () => {
+ createComponent();
+
+ expect(findFirstRowFileIcon().exists()).toBe(true);
+ });
+
+ it('has the correct props bound', () => {
+ createComponent();
+
+ expect(findFirstRowFileIcon().props('fileName')).toBe(file.fileName);
+ });
+ });
+
+ describe('time-ago tooltip', () => {
+ it('exists', () => {
+ createComponent();
+
+ expect(findFirstRowCreatedAt().exists()).toBe(true);
+ });
+
+ it('has the correct props bound', () => {
+ createComponent();
+
+ expect(findFirstRowCreatedAt().props('time')).toBe(file.createdAt);
+ });
+ });
+
+ describe('commit', () => {
+ const withPipeline = {
+ ...file,
+ pipelines: [
+ {
+ sha: 'sha',
+ id: 1,
+ commitPath: 'commitPath',
+ },
+ ],
+ };
+
+ describe('when package file has a pipeline associated', () => {
+ it('exists', () => {
+ createComponent({ packageFiles: [withPipeline] });
+
+ expect(findFirstRowCommitLink().exists()).toBe(true);
+ });
+
+ it('the link points to the commit path', () => {
+ createComponent({ packageFiles: [withPipeline] });
+
+ expect(findFirstRowCommitLink().attributes('href')).toBe(
+ withPipeline.pipelines[0].commitPath,
+ );
+ });
+
+ it('the text is the pipeline sha', () => {
+ createComponent({ packageFiles: [withPipeline] });
+
+ expect(findFirstRowCommitLink().text()).toBe(withPipeline.pipelines[0].sha);
+ });
+ });
+
+ describe('when package file has no pipeline associated', () => {
+ it('does not exist', () => {
+ createComponent();
+
+ expect(findFirstRowCommitLink().exists()).toBe(false);
+ });
+ });
+
+ describe('when only one file lacks an associated pipeline', () => {
+ it('renders the commit when it exists and not otherwise', () => {
+ createComponent({ packageFiles: [withPipeline, file] });
+
+ expect(findFirstRowCommitLink().exists()).toBe(true);
+ expect(findSecondRowCommitLink().exists()).toBe(false);
+ });
+ });
+
+ describe('action menu', () => {
+ describe('when the user can delete', () => {
+ it('exists', () => {
+ createComponent();
+
+ expect(findFirstActionMenu().exists()).toBe(true);
+ });
+
+ describe('menu items', () => {
+ describe('delete file', () => {
+ it('exists', () => {
+ createComponent();
+
+ expect(findActionMenuDelete().exists()).toBe(true);
+ });
+
+ it('emits a delete event when clicked', () => {
+ createComponent();
+
+ findActionMenuDelete().vm.$emit('click');
+
+ const [[{ id }]] = wrapper.emitted('delete-file');
+ expect(id).toBe(file.id);
+ });
+ });
+ });
+ });
+
+ describe('when the user can not delete', () => {
+ const canDelete = false;
+
+ it('does not exist', () => {
+ createComponent({ canDelete });
+
+ expect(findFirstActionMenu().exists()).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('additional details', () => {
+ describe('details toggle button', () => {
+ it('exists', () => {
+ createComponent();
+
+ expect(findFirstToggleDetailsButton().exists()).toBe(true);
+ });
+
+ it('is hidden when no details is present', () => {
+ const { ...noShaFile } = file;
+ noShaFile.fileSha256 = null;
+ noShaFile.fileMd5 = null;
+ noShaFile.fileSha1 = null;
+ createComponent({ packageFiles: [noShaFile] });
+
+ expect(findFirstToggleDetailsButton().exists()).toBe(false);
+ });
+
+ it('toggles the details row', async () => {
+ createComponent();
+
+ expect(findFirstToggleDetailsButton().props('icon')).toBe('angle-down');
+
+ findFirstToggleDetailsButton().vm.$emit('click');
+ await nextTick();
+
+ expect(findFirstRowShaComponent('sha-256').exists()).toBe(true);
+ expect(findFirstToggleDetailsButton().props('icon')).toBe('angle-up');
+
+ findFirstToggleDetailsButton().vm.$emit('click');
+ await nextTick();
+
+ expect(findFirstRowShaComponent('sha-256').exists()).toBe(false);
+ expect(findFirstToggleDetailsButton().props('icon')).toBe('angle-down');
+ });
+ });
+
+ describe('file shas', () => {
+ const showShaFiles = () => {
+ findFirstToggleDetailsButton().vm.$emit('click');
+ return nextTick();
+ };
+
+ it.each`
+ selector | title | sha
+ ${'sha-256'} | ${'SHA-256'} | ${'fileSha256'}
+ ${'md5'} | ${'MD5'} | ${'fileMd5'}
+ ${'sha-1'} | ${'SHA-1'} | ${'be93151dc23ac34a82752444556fe79b32c7a1ad'}
+ `('has a $title row', async ({ selector, title, sha }) => {
+ createComponent();
+
+ await showShaFiles();
+
+ expect(findFirstRowShaComponent(selector).props()).toMatchObject({
+ title,
+ sha,
+ });
+ });
+
+ it('does not display a row when the data is missing', async () => {
+ const { ...missingMd5 } = file;
+ missingMd5.fileMd5 = null;
+
+ createComponent({ packageFiles: [missingMd5] });
+
+ await showShaFiles();
+
+ expect(findFirstRowShaComponent('md5').exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
new file mode 100644
index 00000000000..b69008f04f0
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
@@ -0,0 +1,122 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import { stubComponent } from 'helpers/stub_component';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import {
+ packageData,
+ packagePipelines,
+} from 'jest/packages_and_registries/package_registry/mock_data';
+import { HISTORY_PIPELINES_LIMIT } from '~/packages/details/constants';
+import component from '~/packages_and_registries/package_registry/components/details/package_history.vue';
+import HistoryItem from '~/vue_shared/components/registry/history_item.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+describe('Package History', () => {
+ let wrapper;
+ const defaultProps = {
+ projectName: 'baz project',
+ packageEntity: { ...packageData() },
+ };
+
+ const [onePipeline] = packagePipelines();
+
+ const createPipelines = (amount) =>
+ [...Array(amount)].map((x, index) => packagePipelines({ id: index + 1 })[0]);
+
+ const mountComponent = (props) => {
+ wrapper = shallowMountExtended(component, {
+ propsData: { ...defaultProps, ...props },
+ stubs: {
+ HistoryItem: stubComponent(HistoryItem, {
+ template: '<div data-testid="history-element"><slot></slot></div>',
+ }),
+ GlSprintf,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findHistoryElement = (testId) => wrapper.findByTestId(testId);
+ const findElementLink = (container) => container.findComponent(GlLink);
+ const findElementTimeAgo = (container) => container.findComponent(TimeAgoTooltip);
+ const findTitle = () => wrapper.findByTestId('title');
+ const findTimeline = () => wrapper.findByTestId('timeline');
+
+ it('has the correct title', () => {
+ mountComponent();
+
+ const title = findTitle();
+
+ expect(title.exists()).toBe(true);
+ expect(title.text()).toBe('History');
+ });
+
+ it('has a timeline container', () => {
+ mountComponent();
+
+ const title = findTimeline();
+
+ expect(title.exists()).toBe(true);
+ expect(title.classes()).toEqual(
+ expect.arrayContaining(['timeline', 'main-notes-list', 'notes']),
+ );
+ });
+
+ describe.each`
+ name | amount | icon | text | timeAgoTooltip | link
+ ${'created-on'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'clock'} | ${'@gitlab-org/package-15 version 1.0.0 was first created'} | ${packageData().createdAt} | ${null}
+ ${'first-pipeline-commit'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'commit'} | ${'Created by commit #b83d6e39 on branch master'} | ${null} | ${onePipeline.commitPath}
+ ${'first-pipeline-pipeline'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'pipeline'} | ${'Built by pipeline #1 triggered by Administrator'} | ${onePipeline.createdAt} | ${onePipeline.path}
+ ${'published'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'package'} | ${'Published to the baz project Package Registry'} | ${packageData().createdAt} | ${null}
+ ${'archived'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'history'} | ${'Package has 1 archived update'} | ${null} | ${null}
+ ${'archived'} | ${HISTORY_PIPELINES_LIMIT + 3} | ${'history'} | ${'Package has 2 archived updates'} | ${null} | ${null}
+ ${'pipeline-entry'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'pencil'} | ${'Package updated by commit #b83d6e39 on branch master, built by pipeline #3, and published to the registry'} | ${packageData().createdAt} | ${onePipeline.commitPath}
+ `(
+ 'with $amount pipelines history element $name',
+ ({ name, icon, text, timeAgoTooltip, link, amount }) => {
+ let element;
+
+ beforeEach(() => {
+ const packageEntity = { ...packageData(), pipelines: { nodes: createPipelines(amount) } };
+ mountComponent({
+ packageEntity,
+ });
+ element = findHistoryElement(name);
+ });
+
+ it('exists', () => {
+ expect(element.exists()).toBe(true);
+ });
+
+ it('has the correct icon', () => {
+ expect(element.props('icon')).toBe(icon);
+ });
+
+ it('has the correct text', () => {
+ expect(element.text()).toBe(text);
+ });
+
+ it('time-ago tooltip', () => {
+ const timeAgo = findElementTimeAgo(element);
+ const exist = Boolean(timeAgoTooltip);
+
+ expect(timeAgo.exists()).toBe(exist);
+ if (exist) {
+ expect(timeAgo.props('time')).toBe(timeAgoTooltip);
+ }
+ });
+
+ it('link', () => {
+ const linkElement = findElementLink(element);
+ const exist = Boolean(link);
+
+ expect(linkElement.exists()).toBe(exist);
+ if (exist) {
+ expect(linkElement.attributes('href')).toBe(link);
+ }
+ });
+ },
+ );
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
new file mode 100644
index 00000000000..327f6d81905
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
@@ -0,0 +1,202 @@
+import { GlIcon, GlSprintf } from '@gitlab/ui';
+import { GlBreakpointInstance } from '@gitlab/ui/dist/utils';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import PackageTags from '~/packages/shared/components/package_tags.vue';
+import PackageTitle from '~/packages_and_registries/package_registry/components/details/package_title.vue';
+import {
+ PACKAGE_TYPE_CONAN,
+ PACKAGE_TYPE_MAVEN,
+ PACKAGE_TYPE_NPM,
+ PACKAGE_TYPE_NUGET,
+} from '~/packages_and_registries/package_registry/constants';
+import TitleArea from '~/vue_shared/components/registry/title_area.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+import { packageData, packageFiles, packageTags, packagePipelines } from '../../mock_data';
+
+const packageWithTags = {
+ ...packageData(),
+ tags: { nodes: packageTags() },
+ packageFiles: { nodes: packageFiles() },
+};
+
+describe('PackageTitle', () => {
+ let wrapper;
+
+ function createComponent(packageEntity = packageWithTags) {
+ wrapper = shallowMountExtended(PackageTitle, {
+ propsData: { packageEntity },
+ stubs: {
+ TitleArea,
+ GlSprintf,
+ },
+ });
+ return wrapper.vm.$nextTick();
+ }
+
+ const findTitleArea = () => wrapper.findComponent(TitleArea);
+ const findPackageType = () => wrapper.findByTestId('package-type');
+ const findPackageSize = () => wrapper.findByTestId('package-size');
+ const findPipelineProject = () => wrapper.findByTestId('pipeline-project');
+ const findPackageRef = () => wrapper.findByTestId('package-ref');
+ const findPackageTags = () => wrapper.findComponent(PackageTags);
+ const findPackageBadges = () => wrapper.findAllByTestId('tag-badge');
+ const findSubHeaderIcon = () => wrapper.findComponent(GlIcon);
+ const findSubHeaderText = () => wrapper.findByTestId('sub-header');
+ const findSubHeaderTimeAgo = () => wrapper.findComponent(TimeAgoTooltip);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('renders', () => {
+ it('without tags', async () => {
+ await createComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('with tags', async () => {
+ await createComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('with tags on mobile', async () => {
+ jest.spyOn(GlBreakpointInstance, 'isDesktop').mockReturnValue(false);
+ await createComponent();
+
+ await wrapper.vm.$nextTick();
+
+ expect(findPackageBadges()).toHaveLength(packageTags().length);
+ });
+ });
+
+ describe('package title', () => {
+ it('is correctly bound', async () => {
+ await createComponent();
+
+ expect(findTitleArea().props('title')).toBe(packageData().name);
+ });
+ });
+
+ describe('package icon', () => {
+ const iconUrl = 'a-fake-src';
+
+ it('shows an icon when present and package type is NUGET', async () => {
+ await createComponent({
+ ...packageData(),
+ packageType: PACKAGE_TYPE_NUGET,
+ metadata: { iconUrl },
+ });
+
+ expect(findTitleArea().props('avatar')).toBe(iconUrl);
+ });
+
+ it('hides the icon when not present', async () => {
+ await createComponent();
+
+ expect(findTitleArea().props('avatar')).toBe(null);
+ });
+ });
+
+ describe('sub-header', () => {
+ it('has the eye icon', async () => {
+ await createComponent();
+
+ expect(findSubHeaderIcon().props('name')).toBe('eye');
+ });
+
+ it('has a text showing version', async () => {
+ await createComponent();
+
+ expect(findSubHeaderText().text()).toMatchInterpolatedText('v 1.0.0 published');
+ });
+
+ it('has a time ago tooltip component', async () => {
+ await createComponent();
+ expect(findSubHeaderTimeAgo().props('time')).toBe(packageWithTags.createdAt);
+ });
+ });
+
+ describe.each`
+ packageType | text
+ ${PACKAGE_TYPE_CONAN} | ${'Conan'}
+ ${PACKAGE_TYPE_MAVEN} | ${'Maven'}
+ ${PACKAGE_TYPE_NPM} | ${'npm'}
+ ${PACKAGE_TYPE_NUGET} | ${'NuGet'}
+ `(`package type`, ({ packageType, text }) => {
+  beforeEach(() => createComponent({ ...packageData(), packageType }));
+
+ it(`${packageType} should render ${text}`, () => {
+ expect(findPackageType().props()).toEqual(expect.objectContaining({ text, icon: 'package' }));
+ });
+ });
+
+ describe('calculates the package size', () => {
+ it('correctly calculates when there is only 1 file', async () => {
+ await createComponent({ ...packageData(), packageFiles: { nodes: [packageFiles()[0]] } });
+
+ expect(findPackageSize().props()).toMatchObject({ text: '400.00 KiB', icon: 'disk' });
+ });
+
+ it('correctly calculates when there are multiple files', async () => {
+ await createComponent();
+
+ expect(findPackageSize().props('text')).toBe('800.00 KiB');
+ });
+ });
+
+ describe('package tags', () => {
+ it('displays the package-tags component when the package has tags', async () => {
+ await createComponent();
+
+ expect(findPackageTags().exists()).toBe(true);
+ });
+
+ it('does not display the package-tags component when there are no tags', async () => {
+ await createComponent({ ...packageData(), tags: { nodes: [] } });
+
+ expect(findPackageTags().exists()).toBe(false);
+ });
+ });
+
+ describe('package ref', () => {
+ it('does not display the ref if missing', async () => {
+ await createComponent();
+
+ expect(findPackageRef().exists()).toBe(false);
+ });
+
+ it('correctly shows the package ref if there is one', async () => {
+ await createComponent({
+ ...packageData(),
+ pipelines: { nodes: packagePipelines({ ref: 'test' }) },
+ });
+ expect(findPackageRef().props()).toMatchObject({
+ text: 'test',
+ icon: 'branch',
+ });
+ });
+ });
+
+ describe('pipeline project', () => {
+ it('does not display the project if missing', async () => {
+ await createComponent();
+
+ expect(findPipelineProject().exists()).toBe(false);
+ });
+
+ it('correctly shows the pipeline project if there is one', async () => {
+ await createComponent({
+ ...packageData(),
+ pipelines: { nodes: packagePipelines() },
+ });
+ expect(findPipelineProject().props()).toMatchObject({
+ text: packagePipelines()[0].project.name,
+ icon: 'review-list',
+ link: packagePipelines()[0].project.webUrl,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js
new file mode 100644
index 00000000000..410c1b65348
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js
@@ -0,0 +1,80 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { packageData } from 'jest/packages_and_registries/package_registry/mock_data';
+import InstallationTitle from '~/packages_and_registries/package_registry/components/details/installation_title.vue';
+import PypiInstallation from '~/packages_and_registries/package_registry/components/details/pypi_installation.vue';
+import {
+ PACKAGE_TYPE_PYPI,
+ TRACKING_ACTION_COPY_PIP_INSTALL_COMMAND,
+ TRACKING_ACTION_COPY_PYPI_SETUP_COMMAND,
+} from '~/packages_and_registries/package_registry/constants';
+
+const packageEntity = { ...packageData(), packageType: PACKAGE_TYPE_PYPI };
+
+describe('PypiInstallation', () => {
+ let wrapper;
+
+ const pipCommandStr = 'pip install @gitlab-org/package-15 --extra-index-url pypiPath';
+ const pypiSetupStr = `[gitlab]
+repository = pypiSetupPath
+username = __token__
+password = <your personal access token>`;
+
+ const pipCommand = () => wrapper.findByTestId('pip-command');
+ const setupInstruction = () => wrapper.findByTestId('pypi-setup-content');
+
+ const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
+
+ function createComponent() {
+ wrapper = shallowMountExtended(PypiInstallation, {
+ provide: {
+ pypiHelpPath: 'pypiHelpPath',
+ pypiPath: 'pypiPath',
+ pypiSetupPath: 'pypiSetupPath',
+ },
+ propsData: {
+ packageEntity,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('install command switch', () => {
+ it('has the installation title component', () => {
+ expect(findInstallationTitle().exists()).toBe(true);
+ expect(findInstallationTitle().props()).toMatchObject({
+ packageType: 'pypi',
+ options: [{ value: 'pypi', label: 'Show PyPi commands' }],
+ });
+ });
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('installation commands', () => {
+ it('renders the correct pip command', () => {
+ expect(pipCommand().props()).toMatchObject({
+ instruction: pipCommandStr,
+ trackingAction: TRACKING_ACTION_COPY_PIP_INSTALL_COMMAND,
+ });
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct setup block', () => {
+ expect(setupInstruction().props()).toMatchObject({
+ instruction: pypiSetupStr,
+ multiline: true,
+ trackingAction: TRACKING_ACTION_COPY_PYPI_SETUP_COMMAND,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/version_row_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/version_row_spec.js
new file mode 100644
index 00000000000..f7613949fe4
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/version_row_spec.js
@@ -0,0 +1,89 @@
+import { GlLink, GlSprintf, GlTruncate } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import PackageTags from '~/packages/shared/components/package_tags.vue';
+import PublishMethod from '~/packages/shared/components/publish_method.vue';
+import VersionRow from '~/packages_and_registries/package_registry/components/details/version_row.vue';
+import ListItem from '~/vue_shared/components/registry/list_item.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+import { packageVersions } from '../../mock_data';
+
+const packageVersion = packageVersions()[0];
+
+describe('VersionRow', () => {
+ let wrapper;
+
+ const findListItem = () => wrapper.findComponent(ListItem);
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findPackageTags = () => wrapper.findComponent(PackageTags);
+ const findPublishMethod = () => wrapper.findComponent(PublishMethod);
+ const findTimeAgoTooltip = () => wrapper.findComponent(TimeAgoTooltip);
+
+ function createComponent(packageEntity = packageVersion) {
+ wrapper = shallowMountExtended(VersionRow, {
+ propsData: {
+ packageEntity,
+ },
+ stubs: {
+ ListItem,
+ GlSprintf,
+ GlTruncate,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders', () => {
+ createComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('has a link to the version detail', () => {
+ createComponent();
+
+ expect(findLink().attributes('href')).toBe(`${getIdFromGraphQLId(packageVersion.id)}`);
+ expect(findLink().text()).toBe(packageVersion.name);
+ });
+
+ it('has the version of the package', () => {
+ createComponent();
+
+ expect(wrapper.text()).toContain(packageVersion.version);
+ });
+
+ it('has a package tags component', () => {
+ createComponent();
+
+ expect(findPackageTags().props('tags')).toBe(packageVersion.tags.nodes);
+ });
+
+ it('has a publish method component', () => {
+ createComponent();
+
+ expect(findPublishMethod().props('packageEntity')).toBe(packageVersion);
+ });
+ it('has a time-ago tooltip', () => {
+ createComponent();
+
+ expect(findTimeAgoTooltip().props('time')).toBe(packageVersion.createdAt);
+ });
+
+ describe('disabled status', () => {
+ it('disables the list item', () => {
+ createComponent({ ...packageVersion, status: 'something' });
+
+ expect(findListItem().props('disabled')).toBe(true);
+ });
+
+ it('disables the link', () => {
+ createComponent({ ...packageVersion, status: 'something' });
+
+ expect(findLink().attributes('disabled')).toBe('true');
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js
new file mode 100644
index 00000000000..98ff29ef728
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js
@@ -0,0 +1,251 @@
+export const packageTags = () => [
+ { id: 'gid://gitlab/Packages::Tag/87', name: 'bananas_9', __typename: 'PackageTag' },
+ { id: 'gid://gitlab/Packages::Tag/86', name: 'bananas_8', __typename: 'PackageTag' },
+ { id: 'gid://gitlab/Packages::Tag/85', name: 'bananas_7', __typename: 'PackageTag' },
+];
+
+export const packagePipelines = (extend) => [
+ {
+ commitPath: '/namespace14/project14/-/commit/b83d6e391c22777fca1ed3012fce84f633d7fed0',
+ createdAt: '2020-08-17T14:23:32Z',
+ id: 'gid://gitlab/Ci::Pipeline/36',
+ path: '/namespace14/project14/-/pipelines/36',
+ name: 'project14',
+ ref: 'master',
+ sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
+ project: {
+ name: 'project14',
+ webUrl: 'http://gdk.test:3000/namespace14/project14',
+ __typename: 'Project',
+ },
+ user: {
+ name: 'Administrator',
+ },
+ ...extend,
+ __typename: 'Pipeline',
+ },
+];
+
+export const packageFiles = () => [
+ {
+ id: 'gid://gitlab/Packages::PackageFile/118',
+ fileMd5: 'fileMd5',
+ fileName: 'foo-1.0.1.tgz',
+ fileSha1: 'be93151dc23ac34a82752444556fe79b32c7a1ad',
+ fileSha256: 'fileSha256',
+ size: '409600',
+ createdAt: '2020-08-17T14:23:32Z',
+ downloadPath: 'downloadPath',
+ __typename: 'PackageFile',
+ },
+ {
+ id: 'gid://gitlab/Packages::PackageFile/119',
+ fileMd5: null,
+ fileName: 'foo-1.0.2.tgz',
+ fileSha1: 'be93151dc23ac34a82752444556fe79b32c7a1ss',
+ fileSha256: null,
+ size: '409600',
+ createdAt: '2020-08-17T14:23:32Z',
+ downloadPath: 'downloadPath',
+ __typename: 'PackageFile',
+ },
+];
+
+export const dependencyLinks = () => [
+ {
+ dependencyType: 'DEPENDENCIES',
+ id: 'gid://gitlab/Packages::DependencyLink/77',
+ __typename: 'PackageDependencyLink',
+ dependency: {
+ id: 'gid://gitlab/Packages::Dependency/3',
+ name: 'Ninject.Extensions.Factory',
+ versionPattern: '3.3.2',
+ __typename: 'PackageDependency',
+ },
+ metadata: {
+ id: 'gid://gitlab/Packages::Nuget::DependencyLinkMetadatum/77',
+ targetFramework: '.NETCoreApp3.1',
+ __typename: 'NugetDependencyLinkMetadata',
+ },
+ },
+ {
+ dependencyType: 'DEPENDENCIES',
+ id: 'gid://gitlab/Packages::DependencyLink/78',
+ __typename: 'PackageDependencyLink',
+ dependency: {
+ id: 'gid://gitlab/Packages::Dependency/4',
+ name: 'Ninject.Extensions.Factory',
+ versionPattern: '3.3.2',
+ __typename: 'PackageDependency',
+ },
+ metadata: {
+ id: 'gid://gitlab/Packages::Nuget::DependencyLinkMetadatum/78',
+ targetFramework: '.NETCoreApp3.1',
+ __typename: 'NugetDependencyLinkMetadata',
+ },
+ },
+];
+
+export const packageVersions = () => [
+ {
+ createdAt: '2021-08-10T09:33:54Z',
+ id: 'gid://gitlab/Packages::Package/243',
+ name: '@gitlab-org/package-15',
+ status: 'DEFAULT',
+ tags: { nodes: packageTags() },
+ version: '1.0.1',
+ __typename: 'Package',
+ },
+ {
+ createdAt: '2021-08-10T09:33:54Z',
+ id: 'gid://gitlab/Packages::Package/244',
+ name: '@gitlab-org/package-15',
+ status: 'DEFAULT',
+ tags: { nodes: packageTags() },
+ version: '1.0.2',
+ __typename: 'Package',
+ },
+];
+
+export const packageData = (extend) => ({
+ id: 'gid://gitlab/Packages::Package/111',
+ name: '@gitlab-org/package-15',
+ packageType: 'NPM',
+ version: '1.0.0',
+ createdAt: '2020-08-17T14:23:32Z',
+ updatedAt: '2020-08-17T14:23:32Z',
+ status: 'DEFAULT',
+ ...extend,
+});
+
+export const conanMetadata = () => ({
+ packageChannel: 'stable',
+ packageUsername: 'gitlab-org+gitlab-test',
+ recipe: 'package-8/1.0.0@gitlab-org+gitlab-test/stable',
+ recipePath: 'package-8/1.0.0/gitlab-org+gitlab-test/stable',
+});
+
+export const composerMetadata = () => ({
+ targetSha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
+ composerJson: {
+ license: 'MIT',
+ version: '1.0.0',
+ },
+});
+
+export const pypyMetadata = () => ({
+ requiredPython: '1.0.0',
+});
+
+export const mavenMetadata = () => ({
+ appName: 'appName',
+ appGroup: 'appGroup',
+ appVersion: 'appVersion',
+ path: 'path',
+});
+
+export const nugetMetadata = () => ({
+ iconUrl: 'iconUrl',
+ licenseUrl: 'licenseUrl',
+ projectUrl: 'projectUrl',
+});
+
+export const packageDetailsQuery = (extendPackage) => ({
+ data: {
+ package: {
+ ...packageData(),
+ metadata: {
+ ...conanMetadata(),
+ ...composerMetadata(),
+ ...pypyMetadata(),
+ ...mavenMetadata(),
+ ...nugetMetadata(),
+ },
+ project: {
+ path: 'projectPath',
+ },
+ tags: {
+ nodes: packageTags(),
+ __typename: 'PackageTagConnection',
+ },
+ pipelines: {
+ nodes: packagePipelines(),
+ __typename: 'PipelineConnection',
+ },
+ packageFiles: {
+ nodes: packageFiles(),
+ __typename: 'PackageFileConnection',
+ },
+ versions: {
+ nodes: packageVersions(),
+ __typename: 'PackageConnection',
+ },
+ dependencyLinks: {
+ nodes: dependencyLinks(),
+ },
+ __typename: 'PackageDetailsType',
+ ...extendPackage,
+ },
+ },
+});
+
+export const emptyPackageDetailsQuery = () => ({
+ data: {
+ package: {
+ __typename: 'PackageDetailsType',
+ },
+ },
+});
+
+export const packageDestroyMutation = () => ({
+ data: {
+ destroyPackage: {
+ errors: [],
+ },
+ },
+});
+
+export const packageDestroyMutationError = () => ({
+ data: {
+ destroyPackage: null,
+ },
+ errors: [
+ {
+ message:
+ "The resource that you are attempting to access does not exist or you don't have permission to perform this action",
+ locations: [
+ {
+ line: 2,
+ column: 3,
+ },
+ ],
+ path: ['destroyPackage'],
+ },
+ ],
+});
+
+export const packageDestroyFileMutation = () => ({
+ data: {
+ destroyPackageFile: {
+ errors: [],
+ },
+ },
+});
+export const packageDestroyFileMutationError = () => ({
+ data: {
+ destroyPackageFile: null,
+ },
+ errors: [
+ {
+ message:
+ "The resource that you are attempting to access does not exist or you don't have permission to perform this action",
+ locations: [
+ {
+ line: 2,
+ column: 3,
+ },
+ ],
+ path: ['destroyPackageFile'],
+ },
+ ],
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/utils_spec.js b/spec/frontend/packages_and_registries/package_registry/utils_spec.js
new file mode 100644
index 00000000000..019f94aaec2
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/utils_spec.js
@@ -0,0 +1,23 @@
+import { getPackageTypeLabel } from '~/packages_and_registries/package_registry/utils';
+
+describe('Packages shared utils', () => {
+ describe('getPackageTypeLabel', () => {
+ describe.each`
+ packageType | expectedResult
+ ${'CONAN'} | ${'Conan'}
+ ${'MAVEN'} | ${'Maven'}
+ ${'NPM'} | ${'npm'}
+ ${'NUGET'} | ${'NuGet'}
+ ${'PYPI'} | ${'PyPI'}
+ ${'RUBYGEMS'} | ${'RubyGems'}
+ ${'COMPOSER'} | ${'Composer'}
+ ${'DEBIAN'} | ${'Debian'}
+ ${'HELM'} | ${'Helm'}
+ ${'FOO'} | ${null}
+ `(`package type`, ({ packageType, expectedResult }) => {
+ it(`${packageType} should show as ${expectedResult}`, () => {
+ expect(getPackageTypeLabel(packageType)).toBe(expectedResult);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pages/admin/application_settings/metrics_and_profiling/usage_statistics_spec.js b/spec/frontend/pages/admin/application_settings/metrics_and_profiling/usage_statistics_spec.js
index 858c7b76ac8..4140b985682 100644
--- a/spec/frontend/pages/admin/application_settings/metrics_and_profiling/usage_statistics_spec.js
+++ b/spec/frontend/pages/admin/application_settings/metrics_and_profiling/usage_statistics_spec.js
@@ -5,53 +5,53 @@ import initSetHelperText, {
describe('UsageStatistics', () => {
const FIXTURE = 'application_settings/usage.html';
- let usagePingCheckBox;
- let usagePingFeaturesCheckBox;
- let usagePingFeaturesLabel;
- let usagePingFeaturesHelperText;
+ let servicePingCheckBox;
+ let servicePingFeaturesCheckBox;
+ let servicePingFeaturesLabel;
+ let servicePingFeaturesHelperText;
beforeEach(() => {
loadFixtures(FIXTURE);
initSetHelperText();
- usagePingCheckBox = document.getElementById('application_setting_usage_ping_enabled');
- usagePingFeaturesCheckBox = document.getElementById(
+ servicePingCheckBox = document.getElementById('application_setting_usage_ping_enabled');
+ servicePingFeaturesCheckBox = document.getElementById(
'application_setting_usage_ping_features_enabled',
);
- usagePingFeaturesLabel = document.getElementById('service_ping_features_label');
- usagePingFeaturesHelperText = document.getElementById('service_ping_features_helper_text');
+ servicePingFeaturesLabel = document.getElementById('service_ping_features_label');
+ servicePingFeaturesHelperText = document.getElementById('service_ping_features_helper_text');
});
- const expectEnabledUsagePingFeaturesCheckBox = () => {
- expect(usagePingFeaturesCheckBox.classList.contains('gl-cursor-not-allowed')).toBe(false);
- expect(usagePingFeaturesHelperText.textContent).toEqual(HELPER_TEXT_SERVICE_PING_ENABLED);
+ const expectEnabledServicePingFeaturesCheckBox = () => {
+ expect(servicePingFeaturesCheckBox.classList.contains('gl-cursor-not-allowed')).toBe(false);
+ expect(servicePingFeaturesHelperText.textContent).toEqual(HELPER_TEXT_SERVICE_PING_ENABLED);
};
- const expectDisabledUsagePingFeaturesCheckBox = () => {
- expect(usagePingFeaturesLabel.classList.contains('gl-cursor-not-allowed')).toBe(true);
- expect(usagePingFeaturesHelperText.textContent).toEqual(HELPER_TEXT_SERVICE_PING_DISABLED);
+ const expectDisabledServicePingFeaturesCheckBox = () => {
+ expect(servicePingFeaturesLabel.classList.contains('gl-cursor-not-allowed')).toBe(true);
+ expect(servicePingFeaturesHelperText.textContent).toEqual(HELPER_TEXT_SERVICE_PING_DISABLED);
};
describe('Registration Features checkbox', () => {
- it('is disabled when Usage Ping checkbox is unchecked', () => {
- expect(usagePingCheckBox.checked).toBe(false);
- expectDisabledUsagePingFeaturesCheckBox();
+ it('is disabled when Service Ping checkbox is unchecked', () => {
+ expect(servicePingCheckBox.checked).toBe(false);
+ expectDisabledServicePingFeaturesCheckBox();
});
- it('is enabled when Usage Ping checkbox is checked', () => {
- usagePingCheckBox.click();
- expect(usagePingCheckBox.checked).toBe(true);
- expectEnabledUsagePingFeaturesCheckBox();
+ it('is enabled when Service Ping checkbox is checked', () => {
+ servicePingCheckBox.click();
+ expect(servicePingCheckBox.checked).toBe(true);
+ expectEnabledServicePingFeaturesCheckBox();
});
- it('is switched to disabled when Usage Ping checkbox is unchecked ', () => {
- usagePingCheckBox.click();
- usagePingFeaturesCheckBox.click();
- expectEnabledUsagePingFeaturesCheckBox();
+ it('is switched to disabled when Service Ping checkbox is unchecked ', () => {
+ servicePingCheckBox.click();
+ servicePingFeaturesCheckBox.click();
+ expectEnabledServicePingFeaturesCheckBox();
- usagePingCheckBox.click();
- expect(usagePingCheckBox.checked).toBe(false);
- expect(usagePingFeaturesCheckBox.checked).toBe(false);
- expectDisabledUsagePingFeaturesCheckBox();
+ servicePingCheckBox.click();
+ expect(servicePingCheckBox.checked).toBe(false);
+ expect(servicePingFeaturesCheckBox.checked).toBe(false);
+ expectDisabledServicePingFeaturesCheckBox();
});
});
});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index 4c253f0610b..1e562419f32 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -1,4 +1,4 @@
-import { GlToggle } from '@gitlab/ui';
+import { GlSprintf, GlToggle } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import projectFeatureSetting from '~/pages/projects/shared/permissions/components/project_feature_setting.vue';
import settingsPanel from '~/pages/projects/shared/permissions/components/settings_panel.vue';
@@ -22,12 +22,11 @@ const defaultProps = {
operationsAccessLevel: 20,
pagesAccessLevel: 10,
analyticsAccessLevel: 20,
- containerRegistryEnabled: true,
+ containerRegistryAccessLevel: 20,
lfsEnabled: true,
emailsDisabled: false,
packagesEnabled: true,
showDefaultAwardEmojis: true,
- allowEditingCommitMessages: false,
},
isGitlabCom: true,
canDisableEmails: true,
@@ -53,7 +52,7 @@ describe('Settings Panel', () => {
let wrapper;
const mountComponent = (
- { currentSettings = {}, glFeatures = {}, ...customProps } = {},
+ { currentSettings = {}, ...customProps } = {},
mountFn = shallowMount,
) => {
const propsData = {
@@ -64,9 +63,6 @@ describe('Settings Panel', () => {
return mountFn(settingsPanel, {
propsData,
- provide: {
- glFeatures,
- },
});
};
@@ -89,8 +85,10 @@ describe('Settings Panel', () => {
const findBuildsAccessLevelInput = () =>
wrapper.find('[name="project[project_feature_attributes][builds_access_level]"]');
const findContainerRegistrySettings = () => wrapper.find({ ref: 'container-registry-settings' });
- const findContainerRegistryEnabledInput = () =>
- wrapper.find('[name="project[container_registry_enabled]"]');
+ const findContainerRegistryPublicNoteGlSprintfComponent = () =>
+ findContainerRegistrySettings().findComponent(GlSprintf);
+ const findContainerRegistryAccessLevelInput = () =>
+ wrapper.find('[name="project[project_feature_attributes][container_registry_access_level]"]');
const findPackageSettings = () => wrapper.find({ ref: 'package-settings' });
const findPackagesEnabledInput = () => wrapper.find('[name="project[packages_enabled]"]');
const findPagesSettings = () => wrapper.find({ ref: 'pages-settings' });
@@ -100,8 +98,6 @@ describe('Settings Panel', () => {
const findShowDefaultAwardEmojis = () =>
wrapper.find('input[name="project[project_setting_attributes][show_default_award_emojis]"]');
const findMetricsVisibilitySettings = () => wrapper.find({ ref: 'metrics-visibility-settings' });
- const findAllowEditingCommitMessages = () =>
- wrapper.find({ ref: 'allow-editing-commit-messages' }).exists();
const findOperationsSettings = () => wrapper.find({ ref: 'operations-settings' });
afterEach(() => {
@@ -281,42 +277,38 @@ describe('Settings Panel', () => {
it('should show the container registry public note if the visibility level is public and the registry is available', () => {
wrapper = mountComponent({
- currentSettings: { visibilityLevel: visibilityOptions.PUBLIC },
- registryAvailable: true,
- });
-
- expect(findContainerRegistrySettings().text()).toContain(
- 'Note: the container registry is always visible when a project is public',
- );
- });
-
- it('should hide the container registry public note if the visibility level is private and the registry is available', () => {
- wrapper = mountComponent({
- currentSettings: { visibilityLevel: visibilityOptions.PRIVATE },
+ currentSettings: {
+ visibilityLevel: visibilityOptions.PUBLIC,
+ containerRegistryAccessLevel: featureAccessLevel.EVERYONE,
+ },
registryAvailable: true,
});
- expect(findContainerRegistrySettings().text()).not.toContain(
- 'Note: the container registry is always visible when a project is public',
+ expect(findContainerRegistryPublicNoteGlSprintfComponent().exists()).toBe(true);
+ expect(findContainerRegistryPublicNoteGlSprintfComponent().attributes('message')).toContain(
+ `Note: The container registry is always visible when a project is public and the container registry is set to '%{access_level_description}'`,
);
});
- it('should enable the container registry input when the repository is enabled', () => {
+ it('should hide the container registry public note if the visibility level is public but the registry is private', () => {
wrapper = mountComponent({
- currentSettings: { repositoryAccessLevel: featureAccessLevel.EVERYONE },
+ currentSettings: {
+ visibilityLevel: visibilityOptions.PUBLIC,
+ containerRegistryAccessLevel: featureAccessLevel.PROJECT_MEMBERS,
+ },
registryAvailable: true,
});
- expect(findContainerRegistryEnabledInput().props('disabled')).toBe(false);
+ expect(findContainerRegistryPublicNoteGlSprintfComponent().exists()).toBe(false);
});
- it('should disable the container registry input when the repository is disabled', () => {
+ it('should hide the container registry public note if the visibility level is private and the registry is available', () => {
wrapper = mountComponent({
- currentSettings: { repositoryAccessLevel: featureAccessLevel.NOT_ENABLED },
+ currentSettings: { visibilityLevel: visibilityOptions.PRIVATE },
registryAvailable: true,
});
- expect(findContainerRegistryEnabledInput().props('disabled')).toBe(true);
+ expect(findContainerRegistryPublicNoteGlSprintfComponent().exists()).toBe(false);
});
it('has label for the toggle', () => {
@@ -325,7 +317,7 @@ describe('Settings Panel', () => {
registryAvailable: true,
});
- expect(findContainerRegistrySettings().findComponent(GlToggle).props('label')).toBe(
+ expect(findContainerRegistryAccessLevelInput().props('label')).toBe(
settingsPanel.i18n.containerRegistryLabel,
);
});
@@ -582,18 +574,6 @@ describe('Settings Panel', () => {
);
});
- describe('Settings panel with feature flags', () => {
- describe('Allow edit of commit message', () => {
- it('should show the allow editing of commit messages checkbox', () => {
- wrapper = mountComponent({
- glFeatures: { allowEditingCommitMessages: true },
- });
-
- expect(findAllowEditingCommitMessages()).toBe(true);
- });
- });
- });
-
describe('Analytics', () => {
it('should show the analytics toggle', () => {
wrapper = mountComponent();
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
index f36d6262b5f..082a8977710 100644
--- a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
@@ -15,6 +15,8 @@ import {
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+jest.mock('~/emoji');
+
describe('WikiForm', () => {
let wrapper;
let mock;
@@ -350,11 +352,6 @@ describe('WikiForm', () => {
await waitForPromises();
});
- it('editor is shown in a perpetual loading state', () => {
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
- expect(wrapper.findComponent(ContentEditor).exists()).toBe(false);
- });
-
it('disables the submit button', () => {
expect(findSubmitButton().props('disabled')).toBe(true);
});
diff --git a/spec/frontend/persistent_user_callout_spec.js b/spec/frontend/persistent_user_callout_spec.js
index 1e51ddf909a..1db255106ed 100644
--- a/spec/frontend/persistent_user_callout_spec.js
+++ b/spec/frontend/persistent_user_callout_spec.js
@@ -1,4 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
@@ -166,6 +167,8 @@ describe('PersistentUserCallout', () => {
let mockAxios;
let persistentUserCallout;
+ useMockLocationHelper();
+
beforeEach(() => {
const fixture = createFollowLinkFixture();
const container = fixture.querySelector('.container');
@@ -174,9 +177,6 @@ describe('PersistentUserCallout', () => {
persistentUserCallout = new PersistentUserCallout(container);
jest.spyOn(persistentUserCallout.container, 'remove').mockImplementation(() => {});
-
- delete window.location;
- window.location = { assign: jest.fn() };
});
afterEach(() => {
diff --git a/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js b/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js
index b6d49d0d0f8..a95921359cc 100644
--- a/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js
+++ b/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js
@@ -44,6 +44,7 @@ describe('Pipeline Status', () => {
const findPipelineCommit = () => wrapper.find('[data-testid="pipeline-commit"]');
const findPipelineErrorMsg = () => wrapper.find('[data-testid="pipeline-error-msg"]');
const findPipelineLoadingMsg = () => wrapper.find('[data-testid="pipeline-loading-msg"]');
+ const findPipelineViewBtn = () => wrapper.find('[data-testid="pipeline-view-btn"]');
beforeEach(() => {
mockPipelineQuery = jest.fn();
@@ -96,11 +97,15 @@ describe('Pipeline Status', () => {
});
it('renders pipeline data', () => {
- const { id } = mockProjectPipeline.pipeline;
+ const {
+ id,
+ detailedStatus: { detailsPath },
+ } = mockProjectPipeline.pipeline;
expect(findCiIcon().exists()).toBe(true);
expect(findPipelineId().text()).toBe(`#${id.match(/\d+/g)[0]}`);
expect(findPipelineCommit().text()).toBe(mockCommitSha);
+ expect(findPipelineViewBtn().attributes('href')).toBe(detailsPath);
});
});
@@ -121,6 +126,7 @@ describe('Pipeline Status', () => {
expect(findCiIcon().exists()).toBe(false);
expect(findPipelineId().exists()).toBe(false);
expect(findPipelineCommit().exists()).toBe(false);
+ expect(findPipelineViewBtn().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js
index 93ebbc648fe..9f910ed4f9c 100644
--- a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js
+++ b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js
@@ -1,5 +1,6 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import CodeSnippetAlert from '~/pipeline_editor/components/code_snippet_alert/code_snippet_alert.vue';
import { CODE_SNIPPET_SOURCES } from '~/pipeline_editor/components/code_snippet_alert/constants';
@@ -12,6 +13,10 @@ import {
LOAD_FAILURE_UNKNOWN,
} from '~/pipeline_editor/constants';
+beforeEach(() => {
+ setWindowLocation(TEST_HOST);
+});
+
describe('Pipeline Editor messages', () => {
let wrapper;
@@ -95,9 +100,7 @@ describe('Pipeline Editor messages', () => {
describe('code snippet alert', () => {
const setCodeSnippetUrlParam = (value) => {
- global.jsdom.reconfigure({
- url: `${TEST_HOST}/?code_snippet_copied_from=${value}`,
- });
+ setWindowLocation(`${TEST_HOST}/?code_snippet_copied_from=${value}`);
};
it('does not show by default', () => {
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
index b0d1a69ee56..0c5c08d7190 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
@@ -2,6 +2,7 @@ import { GlAlert, GlButton, GlLoadingIcon, GlTabs } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
+import setWindowLocation from 'helpers/set_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue';
import TextEditor from '~/pipeline_editor/components/editor/text_editor.vue';
@@ -348,15 +349,14 @@ describe('Pipeline editor app component', () => {
});
describe('when a template parameter is present in the URL', () => {
- const { location } = window;
+ const originalLocation = window.location.href;
beforeEach(() => {
- delete window.location;
- window.location = new URL('https://localhost?template=Android');
+ setWindowLocation('?template=Android');
});
afterEach(() => {
- window.location = location;
+ setWindowLocation(originalLocation);
});
it('renders the given template', async () => {
diff --git a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
index b0dbba37b94..e0ba6b2e8da 100644
--- a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
@@ -20,6 +20,7 @@ describe('Pipelines filtered search', () => {
const findTagToken = () => getSearchToken('tag');
const findUserToken = () => getSearchToken('username');
const findStatusToken = () => getSearchToken('status');
+ const findSourceToken = () => getSearchToken('source');
const createComponent = (params = {}) => {
wrapper = mount(PipelinesFilteredSearch, {
@@ -32,6 +33,8 @@ describe('Pipelines filtered search', () => {
};
beforeEach(() => {
+ window.gon = { features: { pipelineSourceFilter: true } };
+
mock = new MockAdapter(axios);
jest.spyOn(Api, 'projectUsers').mockResolvedValue(users);
@@ -70,6 +73,14 @@ describe('Pipelines filtered search', () => {
operators: OPERATOR_IS_ONLY,
});
+ expect(findSourceToken()).toMatchObject({
+ type: 'source',
+ icon: 'trigger-source',
+ title: 'Source',
+ unique: true,
+ operators: OPERATOR_IS_ONLY,
+ });
+
expect(findStatusToken()).toMatchObject({
type: 'status',
icon: 'status',
diff --git a/spec/frontend/pipelines/graph/graph_component_legacy_spec.js b/spec/frontend/pipelines/graph/graph_component_legacy_spec.js
deleted file mode 100644
index a955572a481..00000000000
--- a/spec/frontend/pipelines/graph/graph_component_legacy_spec.js
+++ /dev/null
@@ -1,300 +0,0 @@
-import { GlLoadingIcon } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { setHTMLFixture } from 'helpers/fixtures';
-import GraphComponentLegacy from '~/pipelines/components/graph/graph_component_legacy.vue';
-import LinkedPipelinesColumnLegacy from '~/pipelines/components/graph/linked_pipelines_column_legacy.vue';
-import StageColumnComponentLegacy from '~/pipelines/components/graph/stage_column_component_legacy.vue';
-import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
-import PipelineStore from '~/pipelines/stores/pipeline_store';
-import linkedPipelineJSON from './linked_pipelines_mock_data';
-import graphJSON from './mock_data_legacy';
-
-describe('graph component', () => {
- let store;
- let mediator;
- let wrapper;
-
- const findExpandPipelineBtn = () => wrapper.find('[data-testid="expand-pipeline-button"]');
- const findAllExpandPipelineBtns = () => wrapper.findAll('[data-testid="expand-pipeline-button"]');
- const findStageColumns = () => wrapper.findAll(StageColumnComponentLegacy);
- const findStageColumnAt = (i) => findStageColumns().at(i);
-
- beforeEach(() => {
- mediator = new PipelinesMediator({ endpoint: '' });
- store = new PipelineStore();
- store.storePipeline(linkedPipelineJSON);
-
- setHTMLFixture('<div class="layout-page"></div>');
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- describe('while is loading', () => {
- it('should render a loading icon', () => {
- wrapper = mount(GraphComponentLegacy, {
- propsData: {
- isLoading: true,
- pipeline: {},
- mediator,
- },
- });
-
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- });
- });
-
- describe('with data', () => {
- beforeEach(() => {
- wrapper = mount(GraphComponentLegacy, {
- propsData: {
- isLoading: false,
- pipeline: graphJSON,
- mediator,
- },
- });
- });
-
- it('renders the graph', () => {
- expect(wrapper.find('.js-pipeline-graph').exists()).toBe(true);
- expect(wrapper.find('.loading-icon').exists()).toBe(false);
- expect(wrapper.find('.stage-column-list').exists()).toBe(true);
- });
-
- it('renders columns in the graph', () => {
- expect(findStageColumns()).toHaveLength(graphJSON.details.stages.length);
- });
- });
-
- describe('when linked pipelines are present', () => {
- beforeEach(() => {
- wrapper = mount(GraphComponentLegacy, {
- propsData: {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- },
- });
- });
-
- describe('rendered output', () => {
- it('should include the pipelines graph', () => {
- expect(wrapper.find('.js-pipeline-graph').exists()).toBe(true);
- });
-
- it('should not include the loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- });
-
- it('should include the stage column', () => {
- expect(findStageColumnAt(0).exists()).toBe(true);
- });
-
- it('stage column should have no-margin, gl-mr-26, has-only-one-job classes if there is only one job', () => {
- expect(findStageColumnAt(0).classes()).toEqual(
- expect.arrayContaining(['no-margin', 'gl-mr-26', 'has-only-one-job']),
- );
- });
-
- it('should include the left-margin class on the second child', () => {
- expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
- });
-
- it('should include the left-connector class in the build of the second child', () => {
- expect(findStageColumnAt(1).find('.build:nth-child(1)').classes('left-connector')).toBe(
- true,
- );
- });
-
- it('should include the js-has-linked-pipelines flag', () => {
- expect(wrapper.find('.js-has-linked-pipelines').exists()).toBe(true);
- });
- });
-
- describe('computeds and methods', () => {
- describe('capitalizeStageName', () => {
- it('it capitalizes the stage name', () => {
- expect(wrapper.findAll('.stage-column .stage-name').at(1).text()).toBe('Prebuild');
- });
- });
-
- describe('stageConnectorClass', () => {
- it('it returns left-margin when there is a triggerer', () => {
- expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
- });
- });
- });
-
- describe('linked pipelines components', () => {
- beforeEach(() => {
- wrapper = mount(GraphComponentLegacy, {
- propsData: {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- },
- });
- });
-
- it('should render an upstream pipelines column at first position', () => {
- expect(wrapper.find(LinkedPipelinesColumnLegacy).exists()).toBe(true);
- expect(wrapper.find('.stage-column .stage-name').text()).toBe('Upstream');
- });
-
- it('should render a downstream pipelines column at last position', () => {
- const stageColumnNames = wrapper.findAll('.stage-column .stage-name');
-
- expect(wrapper.find(LinkedPipelinesColumnLegacy).exists()).toBe(true);
- expect(stageColumnNames.at(stageColumnNames.length - 1).text()).toBe('Downstream');
- });
-
- describe('triggered by', () => {
- describe('on click', () => {
- it('should emit `onClickUpstreamPipeline` when triggered by linked pipeline is clicked', async () => {
- const btnWrapper = findExpandPipelineBtn();
-
- btnWrapper.trigger('click');
-
- await nextTick();
- expect(wrapper.emitted().onClickUpstreamPipeline).toEqual([
- store.state.pipeline.triggered_by,
- ]);
- });
- });
-
- describe('with expanded pipeline', () => {
- it('should render expanded pipeline', async () => {
- // expand the pipeline
- store.state.pipeline.triggered_by[0].isExpanded = true;
-
- wrapper = mount(GraphComponentLegacy, {
- propsData: {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- },
- });
-
- await nextTick();
- expect(wrapper.find('.js-upstream-pipeline-12').exists()).toBe(true);
- });
- });
- });
-
- describe('triggered', () => {
- describe('on click', () => {
- // We have to mock this property of HTMLElement since component relies on it
- let offsetParentDescriptor;
- beforeAll(() => {
- offsetParentDescriptor = Object.getOwnPropertyDescriptor(
- HTMLElement.prototype,
- 'offsetParent',
- );
- Object.defineProperty(HTMLElement.prototype, 'offsetParent', {
- get() {
- return this.parentNode;
- },
- });
- });
- afterAll(() => {
- Object.defineProperty(HTMLElement.prototype, offsetParentDescriptor);
- });
-
- it('should emit `onClickDownstreamPipeline`', async () => {
- const btnWrappers = findAllExpandPipelineBtns();
- const downstreamBtnWrapper = btnWrappers.at(btnWrappers.length - 1);
-
- downstreamBtnWrapper.trigger('click');
-
- await nextTick();
- expect(wrapper.emitted().onClickDownstreamPipeline).toEqual([
- [store.state.pipeline.triggered[1]],
- ]);
- });
- });
-
- describe('with expanded pipeline', () => {
- it('should render expanded pipeline', async () => {
- // expand the pipeline
- store.state.pipeline.triggered[0].isExpanded = true;
-
- wrapper = mount(GraphComponentLegacy, {
- propsData: {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- },
- });
-
- await nextTick();
- expect(wrapper.find('.js-downstream-pipeline-34993051')).not.toBeNull();
- });
- });
-
- describe('when column requests a refresh', () => {
- beforeEach(() => {
- findStageColumnAt(0).vm.$emit('refreshPipelineGraph');
- });
-
- it('refreshPipelineGraph is emitted', () => {
- expect(wrapper.emitted().refreshPipelineGraph).toHaveLength(1);
- });
- });
- });
- });
- });
-
- describe('when linked pipelines are not present', () => {
- beforeEach(() => {
- const pipeline = Object.assign(linkedPipelineJSON, { triggered: null, triggered_by: null });
- wrapper = mount(GraphComponentLegacy, {
- propsData: {
- isLoading: false,
- pipeline,
- mediator,
- },
- });
- });
-
- describe('rendered output', () => {
- it('should include the first column with a no margin', () => {
- const firstColumn = wrapper.find('.stage-column');
-
- expect(firstColumn.classes('no-margin')).toBe(true);
- });
-
- it('should not render a linked pipelines column', () => {
- expect(wrapper.find('.linked-pipelines-column').exists()).toBe(false);
- });
- });
-
- describe('stageConnectorClass', () => {
- it('it returns no-margin when no triggerer and there is one job', () => {
- expect(findStageColumnAt(0).classes('no-margin')).toBe(true);
- });
-
- it('it returns left-margin when no triggerer and not the first stage', () => {
- expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
- });
- });
- });
-
- describe('capitalizeStageName', () => {
- it('capitalizes and escapes stage name', () => {
- wrapper = mount(GraphComponentLegacy, {
- propsData: {
- isLoading: false,
- pipeline: graphJSON,
- mediator,
- },
- });
-
- expect(findStageColumnAt(1).props('title')).toEqual(
- 'Deploy &lt;img src=x onerror=alert(document.domain)&gt;',
- );
- });
- });
-});
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
index 30914ba99a5..1fba3823161 100644
--- a/spec/frontend/pipelines/graph/graph_component_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -4,8 +4,8 @@ import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
import JobItem from '~/pipelines/components/graph/job_item.vue';
import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+import { calculatePipelineLayersInfo } from '~/pipelines/components/graph/utils';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
-import { listByLayers } from '~/pipelines/components/parsing_utils';
import {
generateResponse,
mockPipelineResponse,
@@ -150,7 +150,7 @@ describe('graph component', () => {
},
props: {
viewType: LAYER_VIEW,
- pipelineLayers: listByLayers(defaultProps.pipeline),
+ computedPipelineInfo: calculatePipelineLayersInfo(defaultProps.pipeline, 'layer', ''),
},
});
});
diff --git a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
index bb7e27b5ec2..2e8979f2b9d 100644
--- a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
@@ -1,11 +1,19 @@
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import getPipelineDetails from 'shared_queries/pipelines/get_pipeline_details.query.graphql';
import getUserCallouts from '~/graphql_shared/queries/get_user_callouts.query.graphql';
+import axios from '~/lib/utils/axios_utils';
+import {
+ PIPELINES_DETAIL_LINK_DURATION,
+ PIPELINES_DETAIL_LINKS_TOTAL,
+ PIPELINES_DETAIL_LINKS_JOB_RATIO,
+} from '~/performance/constants';
+import * as perfUtils from '~/performance/utils';
import {
IID_FAILURE,
LAYER_VIEW,
@@ -16,8 +24,12 @@ import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
import PipelineGraphWrapper from '~/pipelines/components/graph/graph_component_wrapper.vue';
import GraphViewSelector from '~/pipelines/components/graph/graph_view_selector.vue';
import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+import * as Api from '~/pipelines/components/graph_shared/api';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
import * as parsingUtils from '~/pipelines/components/parsing_utils';
+import getPipelineHeaderData from '~/pipelines/graphql/queries/get_pipeline_header_data.query.graphql';
+import * as sentryUtils from '~/pipelines/utils';
+import { mockRunningPipelineHeaderData } from '../mock_data';
import { mapCallouts, mockCalloutsResponse, mockPipelineResponse } from './mock_data';
const defaultProvide = {
@@ -72,8 +84,10 @@ describe('Pipeline graph wrapper', () => {
} = {}) => {
const callouts = mapCallouts(calloutsList);
const getUserCalloutsHandler = jest.fn().mockResolvedValue(mockCalloutsResponse(callouts));
+ const getPipelineHeaderDataHandler = jest.fn().mockResolvedValue(mockRunningPipelineHeaderData);
const requestHandlers = [
+ [getPipelineHeaderData, getPipelineHeaderDataHandler],
[getPipelineDetails, getPipelineDetailsHandler],
[getUserCallouts, getUserCalloutsHandler],
];
@@ -111,6 +125,11 @@ describe('Pipeline graph wrapper', () => {
createComponentWithApollo();
expect(getGraph().exists()).toBe(false);
});
+
+ it('skips querying headerPipeline', () => {
+ createComponentWithApollo();
+ expect(wrapper.vm.$apollo.queries.headerPipeline.skip).toBe(true);
+ });
});
describe('when data has loaded', () => {
@@ -190,12 +209,15 @@ describe('Pipeline graph wrapper', () => {
describe('when refresh action is emitted', () => {
beforeEach(async () => {
createComponentWithApollo();
+ jest.spyOn(wrapper.vm.$apollo.queries.headerPipeline, 'refetch');
jest.spyOn(wrapper.vm.$apollo.queries.pipeline, 'refetch');
await wrapper.vm.$nextTick();
getGraph().vm.$emit('refreshPipelineGraph');
});
it('calls refetch', () => {
+ expect(wrapper.vm.$apollo.queries.headerPipeline.skip).toBe(false);
+ expect(wrapper.vm.$apollo.queries.headerPipeline.refetch).toHaveBeenCalled();
expect(wrapper.vm.$apollo.queries.pipeline.refetch).toHaveBeenCalled();
});
});
@@ -245,28 +267,11 @@ describe('Pipeline graph wrapper', () => {
});
describe('view dropdown', () => {
- describe('when pipelineGraphLayersView feature flag is off', () => {
- beforeEach(async () => {
- createComponentWithApollo();
- jest.runOnlyPendingTimers();
- await wrapper.vm.$nextTick();
- });
-
- it('does not appear', () => {
- expect(getViewSelector().exists()).toBe(false);
- });
- });
-
- describe('when pipelineGraphLayersView feature flag is on', () => {
+ describe('default', () => {
let layersFn;
beforeEach(async () => {
layersFn = jest.spyOn(parsingUtils, 'listByLayers');
createComponentWithApollo({
- provide: {
- glFeatures: {
- pipelineGraphLayersView: true,
- },
- },
mountFn: mount,
});
@@ -304,14 +309,9 @@ describe('Pipeline graph wrapper', () => {
});
});
- describe('when pipelineGraphLayersView feature flag is on and layers view is selected', () => {
+ describe('when layers view is selected', () => {
beforeEach(async () => {
createComponentWithApollo({
- provide: {
- glFeatures: {
- pipelineGraphLayersView: true,
- },
- },
data: {
currentViewType: LAYER_VIEW,
},
@@ -334,14 +334,9 @@ describe('Pipeline graph wrapper', () => {
});
});
- describe('when pipelineGraphLayersView feature flag is on, layers view is selected, and links are active', () => {
+ describe('when layers view is selected, and links are active', () => {
beforeEach(async () => {
createComponentWithApollo({
- provide: {
- glFeatures: {
- pipelineGraphLayersView: true,
- },
- },
data: {
currentViewType: LAYER_VIEW,
showLinks: true,
@@ -362,11 +357,6 @@ describe('Pipeline graph wrapper', () => {
describe('when hover tip would otherwise show, but it has been previously dismissed', () => {
beforeEach(async () => {
createComponentWithApollo({
- provide: {
- glFeatures: {
- pipelineGraphLayersView: true,
- },
- },
data: {
currentViewType: LAYER_VIEW,
showLinks: true,
@@ -390,11 +380,6 @@ describe('Pipeline graph wrapper', () => {
localStorage.setItem(VIEW_TYPE_KEY, LAYER_VIEW);
createComponentWithApollo({
- provide: {
- glFeatures: {
- pipelineGraphLayersView: true,
- },
- },
mountFn: mount,
});
@@ -422,11 +407,6 @@ describe('Pipeline graph wrapper', () => {
localStorage.setItem(VIEW_TYPE_KEY, LAYER_VIEW);
createComponentWithApollo({
- provide: {
- glFeatures: {
- pipelineGraphLayersView: true,
- },
- },
mountFn: mount,
getPipelineDetailsHandler: jest.fn().mockResolvedValue(nonNeedsResponse),
});
@@ -450,11 +430,6 @@ describe('Pipeline graph wrapper', () => {
nonNeedsResponse.data.project.pipeline.usesNeeds = false;
createComponentWithApollo({
- provide: {
- glFeatures: {
- pipelineGraphLayersView: true,
- },
- },
mountFn: mount,
getPipelineDetailsHandler: jest.fn().mockResolvedValue(nonNeedsResponse),
});
@@ -468,4 +443,112 @@ describe('Pipeline graph wrapper', () => {
});
});
});
+
+ describe('performance metrics', () => {
+ const metricsPath = '/root/project/-/ci/prometheus_metrics/histograms.json';
+ let markAndMeasure;
+ let reportToSentry;
+ let reportPerformance;
+ let mock;
+
+ beforeEach(() => {
+ jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => cb());
+ markAndMeasure = jest.spyOn(perfUtils, 'performanceMarkAndMeasure');
+ reportToSentry = jest.spyOn(sentryUtils, 'reportToSentry');
+ reportPerformance = jest.spyOn(Api, 'reportPerformance');
+ });
+
+ describe('with no metrics path', () => {
+ beforeEach(async () => {
+ createComponentWithApollo();
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('is not called', () => {
+ expect(markAndMeasure).not.toHaveBeenCalled();
+ expect(reportToSentry).not.toHaveBeenCalled();
+ expect(reportPerformance).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('with metrics path', () => {
+ const duration = 875;
+ const numLinks = 7;
+ const totalGroups = 8;
+ const metricsData = {
+ histograms: [
+ { name: PIPELINES_DETAIL_LINK_DURATION, value: duration / 1000 },
+ { name: PIPELINES_DETAIL_LINKS_TOTAL, value: numLinks },
+ {
+ name: PIPELINES_DETAIL_LINKS_JOB_RATIO,
+ value: numLinks / totalGroups,
+ },
+ ],
+ };
+
+ describe('when no duration is obtained', () => {
+ beforeEach(async () => {
+ jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => {
+ return [];
+ });
+
+ createComponentWithApollo({
+ provide: {
+ metricsPath,
+ glFeatures: {
+ pipelineGraphLayersView: true,
+ },
+ },
+ data: {
+ currentViewType: LAYER_VIEW,
+ },
+ });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('attempts to collect metrics', () => {
+ expect(markAndMeasure).toHaveBeenCalled();
+ expect(reportPerformance).not.toHaveBeenCalled();
+ expect(reportToSentry).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('with duration and no error', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onPost(metricsPath).reply(200, {});
+
+ jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => {
+ return [{ duration }];
+ });
+
+ createComponentWithApollo({
+ provide: {
+ metricsPath,
+ glFeatures: {
+ pipelineGraphLayersView: true,
+ },
+ },
+ data: {
+ currentViewType: LAYER_VIEW,
+ },
+ });
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('it calls reportPerformance with expected arguments', () => {
+ expect(markAndMeasure).toHaveBeenCalled();
+ expect(reportPerformance).toHaveBeenCalled();
+ expect(reportPerformance).toHaveBeenCalledWith(metricsPath, metricsData);
+ expect(reportToSentry).not.toHaveBeenCalled();
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_column_legacy_spec.js b/spec/frontend/pipelines/graph/linked_pipelines_column_legacy_spec.js
deleted file mode 100644
index 200e3f48401..00000000000
--- a/spec/frontend/pipelines/graph/linked_pipelines_column_legacy_spec.js
+++ /dev/null
@@ -1,40 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { UPSTREAM } from '~/pipelines/components/graph/constants';
-import LinkedPipeline from '~/pipelines/components/graph/linked_pipeline.vue';
-import LinkedPipelinesColumnLegacy from '~/pipelines/components/graph/linked_pipelines_column_legacy.vue';
-import mockData from './linked_pipelines_mock_data';
-
-describe('Linked Pipelines Column', () => {
- const propsData = {
- columnTitle: 'Upstream',
- linkedPipelines: mockData.triggered,
- graphPosition: 'right',
- projectId: 19,
- type: UPSTREAM,
- };
- let wrapper;
-
- beforeEach(() => {
- wrapper = shallowMount(LinkedPipelinesColumnLegacy, { propsData });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders the pipeline orientation', () => {
- const titleElement = wrapper.find('.linked-pipelines-column-title');
-
- expect(titleElement.text()).toBe(propsData.columnTitle);
- });
-
- it('renders the correct number of linked pipelines', () => {
- const linkedPipelineElements = wrapper.findAll(LinkedPipeline);
-
- expect(linkedPipelineElements.length).toBe(propsData.linkedPipelines.length);
- });
-
- it('renders cross project triangle when column is upstream', () => {
- expect(wrapper.find('.cross-project-triangle').exists()).toBe(true);
- });
-});
diff --git a/spec/frontend/pipelines/graph/mock_data_legacy.js b/spec/frontend/pipelines/graph/mock_data_legacy.js
deleted file mode 100644
index e1c8b027121..00000000000
--- a/spec/frontend/pipelines/graph/mock_data_legacy.js
+++ /dev/null
@@ -1,261 +0,0 @@
-export default {
- id: 123,
- user: {
- name: 'Root',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url: null,
- web_url: 'http://localhost:3000/root',
- },
- active: false,
- coverage: null,
- path: '/root/ci-mock/pipelines/123',
- details: {
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/ci-mock/pipelines/123',
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- },
- duration: 9,
- finished_at: '2017-04-19T14:30:27.542Z',
- stages: [
- {
- name: 'test',
- title: 'test: passed',
- groups: [
- {
- name: 'test',
- size: 1,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/ci-mock/builds/4153',
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/root/ci-mock/builds/4153/retry',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 4153,
- name: 'test',
- build_path: '/root/ci-mock/builds/4153',
- retry_path: '/root/ci-mock/builds/4153/retry',
- playable: false,
- created_at: '2017-04-13T09:25:18.959Z',
- updated_at: '2017-04-13T09:25:23.118Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/ci-mock/builds/4153',
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/root/ci-mock/builds/4153/retry',
- method: 'post',
- },
- },
- },
- ],
- },
- ],
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/ci-mock/pipelines/123#test',
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- },
- path: '/root/ci-mock/pipelines/123#test',
- dropdown_path: '/root/ci-mock/pipelines/123/stage.json?stage=test',
- },
- {
- name: 'deploy <img src=x onerror=alert(document.domain)>',
- title: 'deploy: passed',
- groups: [
- {
- name: 'deploy to production',
- size: 1,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/ci-mock/builds/4166',
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/root/ci-mock/builds/4166/retry',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 4166,
- name: 'deploy to production',
- build_path: '/root/ci-mock/builds/4166',
- retry_path: '/root/ci-mock/builds/4166/retry',
- playable: false,
- created_at: '2017-04-19T14:29:46.463Z',
- updated_at: '2017-04-19T14:30:27.498Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/ci-mock/builds/4166',
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/root/ci-mock/builds/4166/retry',
- method: 'post',
- },
- },
- },
- ],
- },
- {
- name: 'deploy to staging',
- size: 1,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/ci-mock/builds/4159',
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/root/ci-mock/builds/4159/retry',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 4159,
- name: 'deploy to staging',
- build_path: '/root/ci-mock/builds/4159',
- retry_path: '/root/ci-mock/builds/4159/retry',
- playable: false,
- created_at: '2017-04-18T16:32:08.420Z',
- updated_at: '2017-04-18T16:32:12.631Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/ci-mock/builds/4159',
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/root/ci-mock/builds/4159/retry',
- method: 'post',
- },
- },
- },
- ],
- },
- ],
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/ci-mock/pipelines/123#deploy',
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- },
- path: '/root/ci-mock/pipelines/123#deploy',
- dropdown_path: '/root/ci-mock/pipelines/123/stage.json?stage=deploy',
- },
- ],
- artifacts: [],
- manual_actions: [
- {
- name: 'deploy to production',
- path: '/root/ci-mock/builds/4166/play',
- playable: false,
- },
- ],
- },
- flags: {
- latest: true,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: false,
- cancelable: false,
- },
- ref: {
- name: 'main',
- path: '/root/ci-mock/tree/main',
- tag: false,
- branch: true,
- },
- commit: {
- id: '798e5f902592192afaba73f4668ae30e56eae492',
- short_id: '798e5f90',
- title: "Merge branch 'new-branch' into 'main'\r",
- created_at: '2017-04-13T10:25:17.000+01:00',
- parent_ids: [
- '54d483b1ed156fbbf618886ddf7ab023e24f8738',
- 'c8e2d38a6c538822e81c57022a6e3a0cfedebbcc',
- ],
- message:
- "Merge branch 'new-branch' into 'main'\r\n\r\nAdd new file\r\n\r\nSee merge request !1",
- author_name: 'Root',
- author_email: 'admin@example.com',
- authored_date: '2017-04-13T10:25:17.000+01:00',
- committer_name: 'Root',
- committer_email: 'admin@example.com',
- committed_date: '2017-04-13T10:25:17.000+01:00',
- author: {
- name: 'Root',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url: null,
- web_url: 'http://localhost:3000/root',
- },
- author_gravatar_url: null,
- commit_url:
- 'http://localhost:3000/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492',
- commit_path: '/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492',
- },
- created_at: '2017-04-13T09:25:18.881Z',
- updated_at: '2017-04-19T14:30:27.561Z',
-};
diff --git a/spec/frontend/pipelines/graph/stage_column_component_legacy_spec.js b/spec/frontend/pipelines/graph/stage_column_component_legacy_spec.js
deleted file mode 100644
index 2965325ea7c..00000000000
--- a/spec/frontend/pipelines/graph/stage_column_component_legacy_spec.js
+++ /dev/null
@@ -1,130 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import StageColumnComponentLegacy from '~/pipelines/components/graph/stage_column_component_legacy.vue';
-
-describe('stage column component', () => {
- const mockJob = {
- id: 4250,
- name: 'test',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- details_path: '/root/ci-mock/builds/4250',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/root/ci-mock/builds/4250/retry',
- method: 'post',
- },
- },
- };
-
- let wrapper;
-
- beforeEach(() => {
- const mockGroups = [];
- for (let i = 0; i < 3; i += 1) {
- const mockedJob = { ...mockJob };
- mockedJob.id += i;
- mockGroups.push(mockedJob);
- }
-
- wrapper = shallowMount(StageColumnComponentLegacy, {
- propsData: {
- title: 'foo',
- groups: mockGroups,
- hasTriggeredBy: false,
- },
- });
- });
-
- it('should render provided title', () => {
- expect(wrapper.find('.stage-name').text().trim()).toBe('foo');
- });
-
- it('should render the provided groups', () => {
- expect(wrapper.findAll('.builds-container > ul > li').length).toBe(
- wrapper.props('groups').length,
- );
- });
-
- describe('jobId', () => {
- it('escapes job name', () => {
- wrapper = shallowMount(StageColumnComponentLegacy, {
- propsData: {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
- },
- ],
- title: 'test',
- hasTriggeredBy: false,
- },
- });
-
- expect(wrapper.find('.builds-container li').attributes('id')).toBe(
- 'ci-badge-&lt;img src=x onerror=alert(document.domain)&gt;',
- );
- });
- });
-
- describe('with action', () => {
- it('renders action button', () => {
- wrapper = shallowMount(StageColumnComponentLegacy, {
- propsData: {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
- },
- ],
- title: 'test',
- hasTriggeredBy: false,
- action: {
- icon: 'play',
- title: 'Play all',
- path: 'action',
- },
- },
- });
-
- expect(wrapper.find('.js-stage-action').exists()).toBe(true);
- });
- });
-
- describe('without action', () => {
- it('does not render action button', () => {
- wrapper = shallowMount(StageColumnComponentLegacy, {
- propsData: {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
- },
- ],
- title: 'test',
- hasTriggeredBy: false,
- },
- });
-
- expect(wrapper.find('.js-stage-action').exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/pipelines/graph_shared/links_inner_spec.js b/spec/frontend/pipelines/graph_shared/links_inner_spec.js
index 8f39c8c2405..be422fac92c 100644
--- a/spec/frontend/pipelines/graph_shared/links_inner_spec.js
+++ b/spec/frontend/pipelines/graph_shared/links_inner_spec.js
@@ -31,7 +31,7 @@ describe('Links Inner component', () => {
propsData: {
...defaultProps,
...props,
- parsedData: parseData(currentPipelineData.flatMap(({ groups }) => groups)),
+ linksData: parseData(currentPipelineData.flatMap(({ groups }) => groups)).links,
},
});
};
diff --git a/spec/frontend/pipelines/graph_shared/links_layer_spec.js b/spec/frontend/pipelines/graph_shared/links_layer_spec.js
index 932a19f2f00..44ab60cbee7 100644
--- a/spec/frontend/pipelines/graph_shared/links_layer_spec.js
+++ b/spec/frontend/pipelines/graph_shared/links_layer_spec.js
@@ -1,16 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
-import {
- PIPELINES_DETAIL_LINK_DURATION,
- PIPELINES_DETAIL_LINKS_TOTAL,
- PIPELINES_DETAIL_LINKS_JOB_RATIO,
-} from '~/performance/constants';
-import * as perfUtils from '~/performance/utils';
-import * as Api from '~/pipelines/components/graph_shared/api';
import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
-import * as sentryUtils from '~/pipelines/utils';
import { generateResponse, mockPipelineResponse } from '../graph/mock_data';
describe('links layer component', () => {
@@ -94,139 +84,4 @@ describe('links layer component', () => {
expect(findLinksInner().exists()).toBe(false);
});
});
-
- describe('performance metrics', () => {
- const metricsPath = '/root/project/-/ci/prometheus_metrics/histograms.json';
- let markAndMeasure;
- let reportToSentry;
- let reportPerformance;
- let mock;
-
- beforeEach(() => {
- jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => cb());
- markAndMeasure = jest.spyOn(perfUtils, 'performanceMarkAndMeasure');
- reportToSentry = jest.spyOn(sentryUtils, 'reportToSentry');
- reportPerformance = jest.spyOn(Api, 'reportPerformance');
- });
-
- describe('with no metrics config object', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('is not called', () => {
- expect(markAndMeasure).not.toHaveBeenCalled();
- expect(reportToSentry).not.toHaveBeenCalled();
- expect(reportPerformance).not.toHaveBeenCalled();
- });
- });
-
- describe('with metrics config set to false', () => {
- beforeEach(() => {
- createComponent({
- props: {
- metricsConfig: {
- collectMetrics: false,
- metricsPath: '/path/to/metrics',
- },
- },
- });
- });
-
- it('is not called', () => {
- expect(markAndMeasure).not.toHaveBeenCalled();
- expect(reportToSentry).not.toHaveBeenCalled();
- expect(reportPerformance).not.toHaveBeenCalled();
- });
- });
-
- describe('with no metrics path', () => {
- beforeEach(() => {
- createComponent({
- props: {
- metricsConfig: {
- collectMetrics: true,
- metricsPath: '',
- },
- },
- });
- });
-
- it('is not called', () => {
- expect(markAndMeasure).not.toHaveBeenCalled();
- expect(reportToSentry).not.toHaveBeenCalled();
- expect(reportPerformance).not.toHaveBeenCalled();
- });
- });
-
- describe('with metrics path and collect set to true', () => {
- const duration = 875;
- const numLinks = 7;
- const totalGroups = 8;
- const metricsData = {
- histograms: [
- { name: PIPELINES_DETAIL_LINK_DURATION, value: duration / 1000 },
- { name: PIPELINES_DETAIL_LINKS_TOTAL, value: numLinks },
- {
- name: PIPELINES_DETAIL_LINKS_JOB_RATIO,
- value: numLinks / totalGroups,
- },
- ],
- };
-
- describe('when no duration is obtained', () => {
- beforeEach(() => {
- jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => {
- return [];
- });
-
- createComponent({
- props: {
- metricsConfig: {
- collectMetrics: true,
- path: metricsPath,
- },
- },
- });
- });
-
- it('attempts to collect metrics', () => {
- expect(markAndMeasure).toHaveBeenCalled();
- expect(reportPerformance).not.toHaveBeenCalled();
- expect(reportToSentry).not.toHaveBeenCalled();
- });
- });
-
- describe('with duration and no error', () => {
- beforeEach(() => {
- mock = new MockAdapter(axios);
- mock.onPost(metricsPath).reply(200, {});
-
- jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => {
- return [{ duration }];
- });
-
- createComponent({
- props: {
- metricsConfig: {
- collectMetrics: true,
- path: metricsPath,
- },
- },
- });
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- it('it calls reportPerformance with expected arguments', () => {
- expect(markAndMeasure).toHaveBeenCalled();
- expect(reportPerformance).toHaveBeenCalled();
- expect(reportPerformance).toHaveBeenCalledWith(metricsPath, metricsData);
- expect(reportToSentry).not.toHaveBeenCalled();
- });
- });
- });
- });
});
diff --git a/spec/frontend/pipelines/header_component_spec.js b/spec/frontend/pipelines/header_component_spec.js
index 31f0e72c279..e531e26a858 100644
--- a/spec/frontend/pipelines/header_component_spec.js
+++ b/spec/frontend/pipelines/header_component_spec.js
@@ -99,24 +99,6 @@ describe('Pipeline details header', () => {
);
});
- describe('polling', () => {
- it('is stopped when pipeline is finished', async () => {
- wrapper = createComponent({ ...mockRunningPipelineHeader });
-
- await wrapper.setData({
- pipeline: { ...mockCancelledPipelineHeader },
- });
-
- expect(wrapper.vm.$apollo.queries.pipeline.stopPolling).toHaveBeenCalled();
- });
-
- it('is not stopped when pipeline is not finished', () => {
- wrapper = createComponent();
-
- expect(wrapper.vm.$apollo.queries.pipeline.stopPolling).not.toHaveBeenCalled();
- });
- });
-
describe('actions', () => {
describe('Retry action', () => {
beforeEach(() => {
diff --git a/spec/frontend/pipelines/mock_data.js b/spec/frontend/pipelines/mock_data.js
index 7e3c3727c9d..fdc78d48901 100644
--- a/spec/frontend/pipelines/mock_data.js
+++ b/spec/frontend/pipelines/mock_data.js
@@ -127,6 +127,28 @@ export const mockSuccessfulPipelineHeader = {
},
};
+export const mockRunningPipelineHeaderData = {
+ data: {
+ project: {
+ pipeline: {
+ ...mockRunningPipelineHeader,
+ iid: '28',
+ user: {
+ name: 'Foo',
+ username: 'foobar',
+ webPath: '/foo',
+ email: 'foo@bar.com',
+ avatarUrl: 'link',
+ status: null,
+ __typename: 'UserCore',
+ },
+ __typename: 'Pipeline',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
export const stageReply = {
name: 'deploy',
title: 'deploy: running',
diff --git a/spec/frontend/pipelines/parsing_utils_spec.js b/spec/frontend/pipelines/parsing_utils_spec.js
index 074009ae056..3a270c1c1b5 100644
--- a/spec/frontend/pipelines/parsing_utils_spec.js
+++ b/spec/frontend/pipelines/parsing_utils_spec.js
@@ -120,8 +120,8 @@ describe('DAG visualization parsing utilities', () => {
describe('generateColumnsFromLayersList', () => {
const pipeline = generateResponse(mockPipelineResponse, 'root/fungi-xoxo');
- const layers = listByLayers(pipeline);
- const columns = generateColumnsFromLayersListBare(pipeline, layers);
+ const { pipelineLayers } = listByLayers(pipeline);
+ const columns = generateColumnsFromLayersListBare(pipeline, pipelineLayers);
it('returns stage-like objects with default name, id, and status', () => {
columns.forEach((col, idx) => {
@@ -136,7 +136,7 @@ describe('DAG visualization parsing utilities', () => {
it('creates groups that match the list created in listByLayers', () => {
columns.forEach((col, idx) => {
const groupNames = col.groups.map(({ name }) => name);
- expect(groupNames).toEqual(layers[idx]);
+ expect(groupNames).toEqual(pipelineLayers[idx]);
});
});
diff --git a/spec/frontend/pipelines/pipeline_details_mediator_spec.js b/spec/frontend/pipelines/pipeline_details_mediator_spec.js
deleted file mode 100644
index d6699a43b54..00000000000
--- a/spec/frontend/pipelines/pipeline_details_mediator_spec.js
+++ /dev/null
@@ -1,36 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import waitForPromises from 'helpers/wait_for_promises';
-import axios from '~/lib/utils/axios_utils';
-import PipelineMediator from '~/pipelines/pipeline_details_mediator';
-
-describe('PipelineMdediator', () => {
- let mediator;
- let mock;
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- mediator = new PipelineMediator({ endpoint: 'foo.json' });
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- it('should set defaults', () => {
- expect(mediator.options).toEqual({ endpoint: 'foo.json' });
- expect(mediator.state.isLoading).toEqual(false);
- expect(mediator.store).toBeDefined();
- expect(mediator.service).toBeDefined();
- });
-
- describe('request and store data', () => {
- it('should store received data', () => {
- mock.onGet('foo.json').reply(200, { id: '121123' });
- mediator.fetchPipeline();
-
- return waitForPromises().then(() => {
- expect(mediator.store.state.pipeline).toEqual({ id: '121123' });
- });
- });
- });
-});
diff --git a/spec/frontend/pipelines/pipeline_multi_actions_spec.js b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
index 88b3ef2032a..ce33b6011bf 100644
--- a/spec/frontend/pipelines/pipeline_multi_actions_spec.js
+++ b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
@@ -53,6 +53,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
const findDropdown = () => wrapper.findComponent(GlDropdown);
const findAllArtifactItems = () => wrapper.findAllByTestId(artifactItemTestId);
const findFirstArtifactItem = () => wrapper.findByTestId(artifactItemTestId);
+ const findEmptyMessage = () => wrapper.findByTestId('artifacts-empty-message');
beforeEach(() => {
mockAxios = new MockAdapter(axios);
@@ -86,6 +87,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
createComponent({ mockData: { artifacts } });
expect(findAllArtifactItems()).toHaveLength(artifacts.length);
+ expect(findEmptyMessage().exists()).toBe(false);
});
it('should render the correct artifact name and path', () => {
@@ -95,6 +97,12 @@ describe('Pipeline Multi Actions Dropdown', () => {
expect(findFirstArtifactItem().text()).toBe(`Download ${artifacts[0].name} artifact`);
});
+ it('should render empty message when no artifacts are found', () => {
+ createComponent({ mockData: { artifacts: [] } });
+
+ expect(findEmptyMessage().exists()).toBe(true);
+ });
+
describe('with a failing request', () => {
it('should render an error message', async () => {
const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
diff --git a/spec/frontend/pipelines/pipeline_store_spec.js b/spec/frontend/pipelines/pipeline_store_spec.js
deleted file mode 100644
index 1d5754d1f05..00000000000
--- a/spec/frontend/pipelines/pipeline_store_spec.js
+++ /dev/null
@@ -1,27 +0,0 @@
-import PipelineStore from '~/pipelines/stores/pipeline_store';
-
-describe('Pipeline Store', () => {
- let store;
-
- beforeEach(() => {
- store = new PipelineStore();
- });
-
- it('should set defaults', () => {
- expect(store.state.pipeline).toEqual({});
- });
-
- describe('storePipeline', () => {
- it('should store empty object if none is provided', () => {
- store.storePipeline();
-
- expect(store.state.pipeline).toEqual({});
- });
-
- it('should store received object', () => {
- store.storePipeline({ foo: 'bar' });
-
- expect(store.state.pipeline).toEqual({ foo: 'bar' });
- });
- });
-});
diff --git a/spec/frontend/pipelines/pipeline_url_spec.js b/spec/frontend/pipelines/pipeline_url_spec.js
index 367c7f2b2f6..912b5afe0e1 100644
--- a/spec/frontend/pipelines/pipeline_url_spec.js
+++ b/spec/frontend/pipelines/pipeline_url_spec.js
@@ -28,6 +28,7 @@ describe('Pipeline Url Component', () => {
flags: {},
},
pipelineScheduleUrl: 'foo',
+ pipelineKey: 'id',
};
const createComponent = (props) => {
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index 2166961cedd..76feaaad1ec 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -4,6 +4,8 @@ import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { chunk } from 'lodash';
import { nextTick } from 'vue';
+import setWindowLocation from 'helpers/set_window_location_helper';
+import { TEST_HOST } from 'helpers/test_constants';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
@@ -40,7 +42,6 @@ const mockPipelineWithStages = mockPipelinesResponse.pipelines.find(
describe('Pipelines', () => {
let wrapper;
let mock;
- let origWindowLocation;
const paths = {
emptyStateSvgPath: '/assets/illustrations/pipelines_empty.svg',
@@ -73,6 +74,7 @@ describe('Pipelines', () => {
const findTablePagination = () => wrapper.findComponent(TablePagination);
const findTab = (tab) => wrapper.findByTestId(`pipelines-tab-${tab}`);
+ const findPipelineKeyDropdown = () => wrapper.findByTestId('pipeline-key-dropdown');
const findRunPipelineButton = () => wrapper.findByTestId('run-pipeline-button');
const findCiLintButton = () => wrapper.findByTestId('ci-lint-button');
const findCleanCacheButton = () => wrapper.findByTestId('clear-cache-button');
@@ -98,20 +100,13 @@ describe('Pipelines', () => {
);
};
- beforeAll(() => {
- origWindowLocation = window.location;
- delete window.location;
- window.location = {
- search: '',
- protocol: 'https:',
- };
- });
-
- afterAll(() => {
- window.location = origWindowLocation;
+ beforeEach(() => {
+ setWindowLocation(TEST_HOST);
});
beforeEach(() => {
+ window.gon = { features: { pipelineSourceFilter: true } };
+
mock = new MockAdapter(axios);
jest.spyOn(window.history, 'pushState');
@@ -536,6 +531,10 @@ describe('Pipelines', () => {
expect(findFilteredSearch().exists()).toBe(true);
});
+ it('renders the pipeline key dropdown', () => {
+ expect(findPipelineKeyDropdown().exists()).toBe(true);
+ });
+
it('renders tab empty state finished scope', async () => {
mock.onGet(mockPipelinesEndpoint, { params: { scope: 'finished', page: '1' } }).reply(200, {
pipelines: [],
@@ -631,6 +630,10 @@ describe('Pipelines', () => {
expect(findFilteredSearch().exists()).toBe(false);
});
+ it('does not render the pipeline key dropdown', () => {
+ expect(findPipelineKeyDropdown().exists()).toBe(false);
+ });
+
it('does not render tabs nor buttons', () => {
expect(findNavigationTabs().exists()).toBe(false);
expect(findTab('all').exists()).toBe(false);
diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/pipelines/pipelines_table_spec.js
index 68b0dfc018e..4472a5ae70d 100644
--- a/spec/frontend/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_spec.js
@@ -8,6 +8,7 @@ import PipelineTriggerer from '~/pipelines/components/pipelines_list/pipeline_tr
import PipelineUrl from '~/pipelines/components/pipelines_list/pipeline_url.vue';
import PipelinesTable from '~/pipelines/components/pipelines_list/pipelines_table.vue';
import PipelinesTimeago from '~/pipelines/components/pipelines_list/time_ago.vue';
+import { PipelineKeyOptions } from '~/pipelines/constants';
import eventHub from '~/pipelines/event_hub';
import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
@@ -24,6 +25,7 @@ describe('Pipelines Table', () => {
const defaultProps = {
pipelines: [],
viewType: 'root',
+ pipelineKeyOption: PipelineKeyOptions[0],
};
const createMockPipeline = () => {
@@ -80,7 +82,7 @@ describe('Pipelines Table', () => {
it('should render table head with correct columns', () => {
expect(findStatusTh().text()).toBe('Status');
- expect(findPipelineTh().text()).toBe('Pipeline');
+ expect(findPipelineTh().text()).toBe('Pipeline ID');
expect(findTriggererTh().text()).toBe('Triggerer');
expect(findCommitTh().text()).toBe('Commit');
expect(findStagesTh().text()).toBe('Stages');
diff --git a/spec/frontend/pipelines/stores/pipeline_store_spec.js b/spec/frontend/pipelines/stores/pipeline_store_spec.js
deleted file mode 100644
index 2daf7e4b324..00000000000
--- a/spec/frontend/pipelines/stores/pipeline_store_spec.js
+++ /dev/null
@@ -1,135 +0,0 @@
-import PipelineStore from '~/pipelines/stores/pipeline_store';
-import LinkedPipelines from '../linked_pipelines_mock.json';
-
-describe('EE Pipeline store', () => {
- let store;
- let data;
-
- beforeEach(() => {
- store = new PipelineStore();
- data = { ...LinkedPipelines };
-
- store.storePipeline(data);
- });
-
- describe('storePipeline', () => {
- describe('triggered_by', () => {
- it('sets triggered_by as an array', () => {
- expect(store.state.pipeline.triggered_by.length).toEqual(1);
- });
-
- it('adds isExpanding & isLoading keys set to false', () => {
- expect(store.state.pipeline.triggered_by[0].isExpanded).toEqual(false);
- expect(store.state.pipeline.triggered_by[0].isLoading).toEqual(false);
- });
-
- it('parses nested triggered_by', () => {
- expect(store.state.pipeline.triggered_by[0].triggered_by.length).toEqual(1);
- expect(store.state.pipeline.triggered_by[0].triggered_by[0].isExpanded).toEqual(false);
- expect(store.state.pipeline.triggered_by[0].triggered_by[0].isLoading).toEqual(false);
- });
- });
-
- describe('triggered', () => {
- it('adds isExpanding & isLoading keys set to false for each triggered pipeline', () => {
- store.state.pipeline.triggered.forEach((pipeline) => {
- expect(pipeline.isExpanded).toEqual(false);
- expect(pipeline.isLoading).toEqual(false);
- });
- });
-
- it('parses nested triggered pipelines', () => {
- store.state.pipeline.triggered[1].triggered.forEach((pipeline) => {
- expect(pipeline.isExpanded).toEqual(false);
- expect(pipeline.isLoading).toEqual(false);
- });
- });
- });
- });
-
- describe('resetTriggeredByPipeline', () => {
- it('closes the pipeline & nested ones', () => {
- store.state.pipeline.triggered_by[0].isExpanded = true;
- store.state.pipeline.triggered_by[0].triggered_by[0].isExpanded = true;
-
- store.resetTriggeredByPipeline(store.state.pipeline, store.state.pipeline.triggered_by[0]);
-
- expect(store.state.pipeline.triggered_by[0].isExpanded).toEqual(false);
- expect(store.state.pipeline.triggered_by[0].triggered_by[0].isExpanded).toEqual(false);
- });
- });
-
- describe('openTriggeredByPipeline', () => {
- it('opens the given pipeline', () => {
- store.openTriggeredByPipeline(store.state.pipeline, store.state.pipeline.triggered_by[0]);
-
- expect(store.state.pipeline.triggered_by[0].isExpanded).toEqual(true);
- });
- });
-
- describe('closeTriggeredByPipeline', () => {
- it('closes the given pipeline', () => {
- // open it first
- store.openTriggeredByPipeline(store.state.pipeline, store.state.pipeline.triggered_by[0]);
-
- store.closeTriggeredByPipeline(store.state.pipeline, store.state.pipeline.triggered_by[0]);
-
- expect(store.state.pipeline.triggered_by[0].isExpanded).toEqual(false);
- });
- });
-
- describe('resetTriggeredPipelines', () => {
- it('closes the pipeline & nested ones', () => {
- store.state.pipeline.triggered[0].isExpanded = true;
- store.state.pipeline.triggered[0].triggered[0].isExpanded = true;
-
- store.resetTriggeredPipelines(store.state.pipeline, store.state.pipeline.triggered[0]);
-
- expect(store.state.pipeline.triggered[0].isExpanded).toEqual(false);
- expect(store.state.pipeline.triggered[0].triggered[0].isExpanded).toEqual(false);
- });
- });
-
- describe('openTriggeredPipeline', () => {
- it('opens the given pipeline', () => {
- store.openTriggeredPipeline(store.state.pipeline, store.state.pipeline.triggered[0]);
-
- expect(store.state.pipeline.triggered[0].isExpanded).toEqual(true);
- });
- });
-
- describe('closeTriggeredPipeline', () => {
- it('closes the given pipeline', () => {
- // open it first
- store.openTriggeredPipeline(store.state.pipeline, store.state.pipeline.triggered[0]);
-
- store.closeTriggeredPipeline(store.state.pipeline, store.state.pipeline.triggered[0]);
-
- expect(store.state.pipeline.triggered[0].isExpanded).toEqual(false);
- });
- });
-
- describe('toggleLoading', () => {
- it('toggles the isLoading property for the given pipeline', () => {
- store.toggleLoading(store.state.pipeline.triggered[0]);
-
- expect(store.state.pipeline.triggered[0].isLoading).toEqual(true);
- });
- });
-
- describe('addExpandedPipelineToRequestData', () => {
- it('pushes the given id to expandedPipelines array', () => {
- store.addExpandedPipelineToRequestData('213231');
-
- expect(store.state.expandedPipelines).toEqual(['213231']);
- });
- });
-
- describe('removeExpandedPipelineToRequestData', () => {
- it('pushes the given id to expandedPipelines array', () => {
- store.removeExpandedPipelineToRequestData('213231');
-
- expect(store.state.expandedPipelines).toEqual([]);
- });
- });
-});
diff --git a/spec/frontend/pipelines/tokens/pipeline_source_token_spec.js b/spec/frontend/pipelines/tokens/pipeline_source_token_spec.js
new file mode 100644
index 00000000000..5d15f0a3c55
--- /dev/null
+++ b/spec/frontend/pipelines/tokens/pipeline_source_token_spec.js
@@ -0,0 +1,50 @@
+import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { stubComponent } from 'helpers/stub_component';
+import PipelineSourceToken from '~/pipelines/components/pipelines_list/tokens/pipeline_source_token.vue';
+
+describe('Pipeline Source Token', () => {
+ let wrapper;
+
+ const findFilteredSearchToken = () => wrapper.find(GlFilteredSearchToken);
+ const findAllFilteredSearchSuggestions = () => wrapper.findAll(GlFilteredSearchSuggestion);
+
+ const defaultProps = {
+ config: {
+ type: 'source',
+ icon: 'trigger-source',
+ title: 'Source',
+ unique: true,
+ },
+ value: {
+ data: '',
+ },
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMount(PipelineSourceToken, {
+ propsData: {
+ ...defaultProps,
+ },
+ stubs: {
+ GlFilteredSearchToken: stubComponent(GlFilteredSearchToken, {
+ template: `<div><slot name="suggestions"></slot></div>`,
+ }),
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('passes config correctly', () => {
+ expect(findFilteredSearchToken().props('config')).toEqual(defaultProps.config);
+ });
+
+ describe('shows sources correctly', () => {
+ it('renders all pipeline sources available', () => {
+ expect(findAllFilteredSearchSuggestions()).toHaveLength(wrapper.vm.sources.length);
+ });
+ });
+});
diff --git a/spec/frontend/popovers/components/popovers_spec.js b/spec/frontend/popovers/components/popovers_spec.js
index 0c164d97564..25c509346d1 100644
--- a/spec/frontend/popovers/components/popovers_spec.js
+++ b/spec/frontend/popovers/components/popovers_spec.js
@@ -4,7 +4,7 @@ import { useMockMutationObserver } from 'helpers/mock_dom_observer';
import Popovers from '~/popovers/components/popovers.vue';
describe('popovers/components/popovers.vue', () => {
- const { trigger: triggerMutate, observersCount } = useMockMutationObserver();
+ const { trigger: triggerMutate } = useMockMutationObserver();
let wrapper;
const buildWrapper = (...targets) => {
@@ -120,10 +120,13 @@ describe('popovers/components/popovers.vue', () => {
it('disconnects mutation observer on beforeDestroy', async () => {
await buildWrapper(createPopoverTarget());
+ const { observer } = wrapper.vm;
+ jest.spyOn(observer, 'disconnect');
- expect(observersCount()).toBe(1);
+ expect(observer.disconnect).toHaveBeenCalledTimes(0);
wrapper.destroy();
- expect(observersCount()).toBe(0);
+
+ expect(observer.disconnect).toHaveBeenCalledTimes(1);
});
});
diff --git a/spec/frontend/profile/preferences/components/profile_preferences_spec.js b/spec/frontend/profile/preferences/components/profile_preferences_spec.js
index f1172a73d36..4d2dcf83d3b 100644
--- a/spec/frontend/profile/preferences/components/profile_preferences_spec.js
+++ b/spec/frontend/profile/preferences/components/profile_preferences_spec.js
@@ -1,6 +1,7 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import createFlash from '~/flash';
import IntegrationView from '~/profile/preferences/components/integration_view.vue';
@@ -19,6 +20,8 @@ import {
jest.mock('~/flash');
const expectedUrl = '/foo';
+useMockLocationHelper();
+
describe('ProfilePreferences component', () => {
let wrapper;
const defaultProvide = {
@@ -174,8 +177,6 @@ describe('ProfilePreferences component', () => {
});
describe('theme changes', () => {
- const { location } = window;
-
let themeInput;
let form;
@@ -197,18 +198,6 @@ describe('ProfilePreferences component', () => {
form.dispatchEvent(successEvent);
}
- beforeAll(() => {
- delete window.location;
- window.location = {
- ...location,
- reload: jest.fn(),
- };
- });
-
- afterAll(() => {
- window.location = location;
- });
-
beforeEach(() => {
setupBody();
themeInput = createThemeInput();
diff --git a/spec/frontend/projects/compare/components/app_legacy_spec.js b/spec/frontend/projects/compare/components/app_legacy_spec.js
deleted file mode 100644
index 6fdf4014575..00000000000
--- a/spec/frontend/projects/compare/components/app_legacy_spec.js
+++ /dev/null
@@ -1,159 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import CompareApp from '~/projects/compare/components/app_legacy.vue';
-import RevisionDropdown from '~/projects/compare/components/revision_dropdown_legacy.vue';
-
-jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
-
-const projectCompareIndexPath = 'some/path';
-const refsProjectPath = 'some/refs/path';
-const paramsFrom = 'main';
-const paramsTo = 'some-other-branch';
-
-describe('CompareApp component', () => {
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(CompareApp, {
- propsData: {
- projectCompareIndexPath,
- refsProjectPath,
- paramsFrom,
- paramsTo,
- projectMergeRequestPath: '',
- createMrPath: '',
- ...props,
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- beforeEach(() => {
- createComponent();
- });
-
- const findSourceDropdown = () => wrapper.find('[data-testid="sourceRevisionDropdown"]');
- const findTargetDropdown = () => wrapper.find('[data-testid="targetRevisionDropdown"]');
-
- it('renders component with prop', () => {
- expect(wrapper.props()).toEqual(
- expect.objectContaining({
- projectCompareIndexPath,
- refsProjectPath,
- paramsFrom,
- paramsTo,
- }),
- );
- });
-
- it('contains the correct form attributes', () => {
- expect(wrapper.attributes('action')).toBe(projectCompareIndexPath);
- expect(wrapper.attributes('method')).toBe('POST');
- });
-
- it('has input with csrf token', () => {
- expect(wrapper.find('input[name="authenticity_token"]').attributes('value')).toBe(
- 'mock-csrf-token',
- );
- });
-
- it('has ellipsis', () => {
- expect(wrapper.find('[data-testid="ellipsis"]').exists()).toBe(true);
- });
-
- describe('Source and Target BranchDropdown components', () => {
- const findAllBranchDropdowns = () => wrapper.findAll(RevisionDropdown);
-
- it('renders the components with the correct props', () => {
- expect(findAllBranchDropdowns().length).toBe(2);
- expect(findSourceDropdown().props('revisionText')).toBe('Source');
- expect(findTargetDropdown().props('revisionText')).toBe('Target');
- });
-
- it('sets the revision when the "selectRevision" event is emitted', async () => {
- findSourceDropdown().vm.$emit('selectRevision', {
- direction: 'to',
- revision: 'some-source-revision',
- });
-
- findTargetDropdown().vm.$emit('selectRevision', {
- direction: 'from',
- revision: 'some-target-revision',
- });
-
- await wrapper.vm.$nextTick();
-
- expect(findTargetDropdown().props('paramsBranch')).toBe('some-target-revision');
- expect(findSourceDropdown().props('paramsBranch')).toBe('some-source-revision');
- });
- });
-
- describe('compare button', () => {
- const findCompareButton = () => wrapper.find(GlButton);
-
- it('renders button', () => {
- expect(findCompareButton().exists()).toBe(true);
- });
-
- it('submits form', () => {
- findCompareButton().vm.$emit('click');
- expect(wrapper.find('form').element.submit).toHaveBeenCalled();
- });
-
- it('has compare text', () => {
- expect(findCompareButton().text()).toBe('Compare');
- });
- });
-
- describe('swap revisions button', () => {
- const findSwapRevisionsButton = () => wrapper.find('[data-testid="swapRevisionsButton"]');
-
- it('renders the swap revisions button', () => {
- expect(findSwapRevisionsButton().exists()).toBe(true);
- });
-
- it('has the correct text', () => {
- expect(findSwapRevisionsButton().text()).toBe('Swap revisions');
- });
-
- it('swaps revisions when clicked', async () => {
- findSwapRevisionsButton().vm.$emit('click');
-
- await wrapper.vm.$nextTick();
-
- expect(findTargetDropdown().props('paramsBranch')).toBe(paramsTo);
- expect(findSourceDropdown().props('paramsBranch')).toBe(paramsFrom);
- });
- });
-
- describe('merge request buttons', () => {
- const findProjectMrButton = () => wrapper.find('[data-testid="projectMrButton"]');
- const findCreateMrButton = () => wrapper.find('[data-testid="createMrButton"]');
-
- it('does not have merge request buttons', () => {
- createComponent();
- expect(findProjectMrButton().exists()).toBe(false);
- expect(findCreateMrButton().exists()).toBe(false);
- });
-
- it('has "View open merge request" button', () => {
- createComponent({
- projectMergeRequestPath: 'some/project/merge/request/path',
- });
- expect(findProjectMrButton().exists()).toBe(true);
- expect(findCreateMrButton().exists()).toBe(false);
- });
-
- it('has "Create merge request" button', () => {
- createComponent({
- createMrPath: 'some/create/create/mr/path',
- });
- expect(findProjectMrButton().exists()).toBe(false);
- expect(findCreateMrButton().exists()).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/projects/terraform_notification/terraform_notification_spec.js b/spec/frontend/projects/terraform_notification/terraform_notification_spec.js
index be34b207c4b..71c22998b08 100644
--- a/spec/frontend/projects/terraform_notification/terraform_notification_spec.js
+++ b/spec/frontend/projects/terraform_notification/terraform_notification_spec.js
@@ -5,19 +5,21 @@ import TerraformNotification from '~/projects/terraform_notification/components/
jest.mock('~/lib/utils/common_utils');
-const bannerDissmisedKey = 'terraform_notification_dismissed_for_project_1';
+const terraformImagePath = '/path/to/image';
+const bannerDismissedKey = 'terraform_notification_dismissed';
describe('TerraformNotificationBanner', () => {
let wrapper;
- const propsData = {
- projectId: 1,
+ const provideData = {
+ terraformImagePath,
+ bannerDismissedKey,
};
const findBanner = () => wrapper.findComponent(GlBanner);
beforeEach(() => {
wrapper = shallowMount(TerraformNotification, {
- propsData,
+ provide: provideData,
stubs: { GlBanner },
});
});
@@ -27,19 +29,6 @@ describe('TerraformNotificationBanner', () => {
parseBoolean.mockReturnValue(false);
});
- describe('when the dismiss cookie is set', () => {
- beforeEach(() => {
- parseBoolean.mockReturnValue(true);
- wrapper = shallowMount(TerraformNotification, {
- propsData,
- });
- });
-
- it('should not render the banner', () => {
- expect(findBanner().exists()).toBe(false);
- });
- });
-
describe('when the dismiss cookie is not set', () => {
it('should render the banner', () => {
expect(findBanner().exists()).toBe(true);
@@ -51,8 +40,8 @@ describe('TerraformNotificationBanner', () => {
await findBanner().vm.$emit('close');
});
- it('should set the cookie with the bannerDissmisedKey', () => {
- expect(setCookie).toHaveBeenCalledWith(bannerDissmisedKey, true);
+ it('should set the cookie with the bannerDismissedKey', () => {
+ expect(setCookie).toHaveBeenCalledWith(bannerDismissedKey, true);
});
it('should remove the banner', () => {
diff --git a/spec/frontend/registry/explorer/components/details_page/delete_modal_spec.js b/spec/frontend/registry/explorer/components/details_page/delete_modal_spec.js
index 8fe659694ba..d2fe5af3a94 100644
--- a/spec/frontend/registry/explorer/components/details_page/delete_modal_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/delete_modal_spec.js
@@ -1,5 +1,6 @@
-import { GlSprintf } from '@gitlab/ui';
+import { GlSprintf, GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
import component from '~/registry/explorer/components/details_page/delete_modal.vue';
import {
REMOVE_TAG_CONFIRMATION_TEXT,
@@ -12,8 +13,9 @@ import { GlModal } from '../../stubs';
describe('Delete Modal', () => {
let wrapper;
- const findModal = () => wrapper.find(GlModal);
+ const findModal = () => wrapper.findComponent(GlModal);
const findDescription = () => wrapper.find('[data-testid="description"]');
+ const findInputComponent = () => wrapper.findComponent(GlFormInput);
const mountComponent = (propsData) => {
wrapper = shallowMount(component, {
@@ -25,6 +27,13 @@ describe('Delete Modal', () => {
});
};
+ const expectPrimaryActionStatus = (disabled = true) =>
+ expect(findModal().props('actionPrimary')).toMatchObject(
+ expect.objectContaining({
+ attributes: [{ variant: 'danger' }, { disabled }],
+ }),
+ );
+
afterEach(() => {
wrapper.destroy();
wrapper = null;
@@ -65,11 +74,49 @@ describe('Delete Modal', () => {
it('has the correct description', () => {
mountComponent({ deleteImage: true });
- expect(wrapper.text()).toContain(DELETE_IMAGE_CONFIRMATION_TEXT);
+ expect(wrapper.text()).toContain(
+ DELETE_IMAGE_CONFIRMATION_TEXT.replace('%{code}', '').trim(),
+ );
+ });
+
+ describe('delete button', () => {
+ const itemsToBeDeleted = [{ project: { path: 'foo' } }];
+
+ it('is disabled by default', () => {
+ mountComponent({ deleteImage: true });
+
+ expectPrimaryActionStatus();
+ });
+
+ it('if the user types something different from the project path is disabled', async () => {
+ mountComponent({ deleteImage: true, itemsToBeDeleted });
+
+ findInputComponent().vm.$emit('input', 'bar');
+
+ await nextTick();
+
+ expectPrimaryActionStatus();
+ });
+
+ it('if the user types the project path it is enabled', async () => {
+ mountComponent({ deleteImage: true, itemsToBeDeleted });
+
+ findInputComponent().vm.$emit('input', 'foo');
+
+ await nextTick();
+
+ expectPrimaryActionStatus(false);
+ });
});
});
describe('when we are deleting tags', () => {
+ it('delete button is enabled', () => {
+ mountComponent();
+
+ expectPrimaryActionStatus(false);
+ });
+
describe('itemsToBeDeleted contains one element', () => {
beforeEach(() => {
mountComponent({ itemsToBeDeleted: [{ path: 'foo' }] });
diff --git a/spec/frontend/registry/explorer/components/details_page/details_header_spec.js b/spec/frontend/registry/explorer/components/details_page/details_header_spec.js
index 632f506f4ae..acff5c21940 100644
--- a/spec/frontend/registry/explorer/components/details_page/details_header_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/details_header_spec.js
@@ -1,10 +1,11 @@
-import { GlButton, GlIcon } from '@gitlab/ui';
+import { GlDropdownItem, GlIcon } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import { useFakeDate } from 'helpers/fake_date';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import waitForPromises from 'helpers/wait_for_promises';
+import { GlDropdown } from 'jest/registry/explorer/stubs';
import component from '~/registry/explorer/components/details_page/details_header.vue';
import {
UNSCHEDULED_STATUS,
@@ -48,8 +49,8 @@ describe('Details Header', () => {
const findTitle = () => findByTestId('title');
const findTagsCount = () => findByTestId('tags-count');
const findCleanup = () => findByTestId('cleanup');
- const findDeleteButton = () => wrapper.find(GlButton);
- const findInfoIcon = () => wrapper.find(GlIcon);
+ const findDeleteButton = () => wrapper.findComponent(GlDropdownItem);
+ const findInfoIcon = () => wrapper.findComponent(GlIcon);
const waitForMetadataItems = async () => {
// Metadata items are printed by a loop in the title-area and it takes two ticks for them to be available
@@ -84,6 +85,8 @@ describe('Details Header', () => {
mocks,
stubs: {
TitleArea,
+ GlDropdown,
+ GlDropdownItem,
},
});
};
@@ -152,10 +155,11 @@ describe('Details Header', () => {
it('has the correct props', () => {
mountComponent();
- expect(findDeleteButton().props()).toMatchObject({
- variant: 'danger',
- disabled: false,
- });
+ expect(findDeleteButton().attributes()).toMatchObject(
+ expect.objectContaining({
+ variant: 'danger',
+ }),
+ );
});
it('emits the correct event', () => {
@@ -168,16 +172,16 @@ describe('Details Header', () => {
it.each`
canDelete | disabled | isDisabled
- ${true} | ${false} | ${false}
- ${true} | ${true} | ${true}
- ${false} | ${false} | ${true}
- ${false} | ${true} | ${true}
+ ${true} | ${false} | ${undefined}
+ ${true} | ${true} | ${'true'}
+ ${false} | ${false} | ${'true'}
+ ${false} | ${true} | ${'true'}
`(
'when canDelete is $canDelete and disabled is $disabled is $isDisabled that the button is disabled',
({ canDelete, disabled, isDisabled }) => {
mountComponent({ propsData: { image: { ...defaultImage, canDelete }, disabled } });
- expect(findDeleteButton().props('disabled')).toBe(isDisabled);
+ expect(findDeleteButton().attributes('disabled')).toBe(isDisabled);
},
);
});
diff --git a/spec/frontend/registry/explorer/components/list_page/cleanup_status_spec.js b/spec/frontend/registry/explorer/components/list_page/cleanup_status_spec.js
index c89bb874a7f..8f2c049a357 100644
--- a/spec/frontend/registry/explorer/components/list_page/cleanup_status_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/cleanup_status_spec.js
@@ -2,7 +2,7 @@ import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import CleanupStatus from '~/registry/explorer/components/list_page/cleanup_status.vue';
import {
- ASYNC_DELETE_IMAGE_ERROR_MESSAGE,
+ CLEANUP_TIMED_OUT_ERROR_MESSAGE,
CLEANUP_STATUS_SCHEDULED,
CLEANUP_STATUS_ONGOING,
CLEANUP_STATUS_UNFINISHED,
@@ -81,7 +81,7 @@ describe('cleanup_status', () => {
const tooltip = getBinding(findExtraInfoIcon().element, 'gl-tooltip');
- expect(tooltip.value.title).toBe(ASYNC_DELETE_IMAGE_ERROR_MESSAGE);
+ expect(tooltip.value.title).toBe(CLEANUP_TIMED_OUT_ERROR_MESSAGE);
});
});
});
diff --git a/spec/frontend/registry/explorer/mock_data.js b/spec/frontend/registry/explorer/mock_data.js
index 27246cf2364..6a835a28807 100644
--- a/spec/frontend/registry/explorer/mock_data.js
+++ b/spec/frontend/registry/explorer/mock_data.js
@@ -119,6 +119,7 @@ export const containerRepositoryMock = {
expirationPolicyCleanupStatus: 'UNSCHEDULED',
project: {
visibility: 'public',
+ path: 'gitlab-test',
containerExpirationPolicy: {
enabled: false,
nextRunAt: '2020-11-27T08:59:27Z',
diff --git a/spec/frontend/registry/explorer/pages/details_spec.js b/spec/frontend/registry/explorer/pages/details_spec.js
index 022f6e71fe6..21af9dcc60f 100644
--- a/spec/frontend/registry/explorer/pages/details_spec.js
+++ b/spec/frontend/registry/explorer/pages/details_spec.js
@@ -335,7 +335,7 @@ describe('Details Page', () => {
describe('Partial Cleanup Alert', () => {
const config = {
runCleanupPoliciesHelpPagePath: 'foo',
- cleanupPoliciesHelpPagePath: 'bar',
+ expirationPolicyHelpPagePath: 'bar',
userCalloutsPath: 'call_out_path',
userCalloutId: 'call_out_id',
showUnfinishedTagCleanupCallout: true,
@@ -367,7 +367,7 @@ describe('Details Page', () => {
expect(findPartialCleanupAlert().props()).toEqual({
runCleanupPoliciesHelpPagePath: config.runCleanupPoliciesHelpPagePath,
- cleanupPoliciesHelpPagePath: config.cleanupPoliciesHelpPagePath,
+ cleanupPoliciesHelpPagePath: config.expirationPolicyHelpPagePath,
});
});
diff --git a/spec/frontend/registry/explorer/stubs.js b/spec/frontend/registry/explorer/stubs.js
index d6fba863ee0..4f65e73d3fa 100644
--- a/spec/frontend/registry/explorer/stubs.js
+++ b/spec/frontend/registry/explorer/stubs.js
@@ -2,6 +2,7 @@ import {
GlModal as RealGlModal,
GlEmptyState as RealGlEmptyState,
GlSkeletonLoader as RealGlSkeletonLoader,
+ GlDropdown as RealGlDropdown,
} from '@gitlab/ui';
import { RouterLinkStub } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
@@ -38,3 +39,7 @@ export const ListItem = {
};
},
};
+
+export const GlDropdown = stubComponent(RealGlDropdown, {
+ template: '<div><slot></slot></div>',
+});
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index 748b48dacaa..1db6fa21d6b 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -4,6 +4,7 @@ import MockAdapter from 'axios-mock-adapter';
import { merge } from 'lodash';
import Vuex from 'vuex';
import { getJSONFixture } from 'helpers/fixtures';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import * as commonUtils from '~/lib/utils/common_utils';
import ReleaseEditNewApp from '~/releases/components/app_edit_new.vue';
@@ -77,7 +78,7 @@ describe('Release edit/new component', () => {
};
beforeEach(() => {
- global.jsdom.reconfigure({ url: TEST_HOST });
+ setWindowLocation(TEST_HOST);
mock = new MockAdapter(axios);
gon.api_version = 'v4';
@@ -164,9 +165,7 @@ describe('Release edit/new component', () => {
`when the URL contains a "${BACK_URL_PARAM}=$backUrl" parameter`,
({ backUrl, expectedHref }) => {
beforeEach(async () => {
- global.jsdom.reconfigure({
- url: `${TEST_HOST}?${BACK_URL_PARAM}=${encodeURIComponent(backUrl)}`,
- });
+ setWindowLocation(`${TEST_HOST}?${BACK_URL_PARAM}=${encodeURIComponent(backUrl)}`);
await factory();
});
diff --git a/spec/frontend/releases/components/release_block_header_spec.js b/spec/frontend/releases/components/release_block_header_spec.js
index 0f6657090e6..47fd6377fcf 100644
--- a/spec/frontend/releases/components/release_block_header_spec.js
+++ b/spec/frontend/releases/components/release_block_header_spec.js
@@ -2,6 +2,7 @@ import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { merge } from 'lodash';
import { getJSONFixture } from 'helpers/fixtures';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleaseBlockHeader from '~/releases/components/release_block_header.vue';
import { BACK_URL_PARAM } from '~/releases/constants';
@@ -60,12 +61,7 @@ describe('Release block header', () => {
const currentUrl = 'https://example.gitlab.com/path';
beforeEach(() => {
- Object.defineProperty(window, 'location', {
- writable: true,
- value: {
- href: currentUrl,
- },
- });
+ setWindowLocation(currentUrl);
factory();
});
diff --git a/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js b/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
index b8299d44f13..84863eac3d3 100644
--- a/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
+++ b/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
@@ -3,6 +3,7 @@ import Vuex from 'vuex';
import CodequalityIssueBody from '~/reports/codequality_report/components/codequality_issue_body.vue';
import GroupedCodequalityReportsApp from '~/reports/codequality_report/grouped_codequality_reports_app.vue';
import { getStoreConfig } from '~/reports/codequality_report/store';
+import { STATUS_NOT_FOUND } from '~/reports/constants';
import { parsedReportIssues } from './mock_data';
const localVue = createLocalVue();
@@ -14,8 +15,6 @@ describe('Grouped code quality reports app', () => {
const PATHS = {
codequalityHelpPath: 'codequality_help.html',
- basePath: 'base.json',
- headPath: 'head.json',
baseBlobPath: 'base/blob/path/',
headBlobPath: 'head/blob/path/',
};
@@ -127,21 +126,6 @@ describe('Grouped code quality reports app', () => {
});
});
- describe('when there is a head report but no base report', () => {
- beforeEach(() => {
- mockStore.state.basePath = null;
- mockStore.state.hasError = true;
- });
-
- it('renders error text', () => {
- expect(findWidget().text()).toContain('Failed to load codeclimate report');
- });
-
- it('renders a help icon with more information', () => {
- expect(findWidget().find('[data-testid="question-icon"]').exists()).toBe(true);
- });
- });
-
describe('on error', () => {
beforeEach(() => {
mockStore.state.hasError = true;
@@ -154,5 +138,15 @@ describe('Grouped code quality reports app', () => {
it('does not render a help icon', () => {
expect(findWidget().find('[data-testid="question-icon"]').exists()).toBe(false);
});
+
+ describe('when base report was not found', () => {
+ beforeEach(() => {
+ mockStore.state.status = STATUS_NOT_FOUND;
+ });
+
+ it('renders a help icon with more information', () => {
+ expect(findWidget().find('[data-testid="question-icon"]').exists()).toBe(true);
+ });
+ });
});
});
diff --git a/spec/frontend/reports/codequality_report/store/actions_spec.js b/spec/frontend/reports/codequality_report/store/actions_spec.js
index 9dda024bffd..1821390786b 100644
--- a/spec/frontend/reports/codequality_report/store/actions_spec.js
+++ b/spec/frontend/reports/codequality_report/store/actions_spec.js
@@ -5,8 +5,14 @@ import axios from '~/lib/utils/axios_utils';
import createStore from '~/reports/codequality_report/store';
import * as actions from '~/reports/codequality_report/store/actions';
import * as types from '~/reports/codequality_report/store/mutation_types';
+import { STATUS_NOT_FOUND } from '~/reports/constants';
import { reportIssues, parsedReportIssues } from '../mock_data';
+const pollInterval = 123;
+const pollIntervalHeader = {
+ 'Poll-Interval': pollInterval,
+};
+
describe('Codequality Reports actions', () => {
let localState;
let localStore;
@@ -19,8 +25,6 @@ describe('Codequality Reports actions', () => {
describe('setPaths', () => {
it('should commit SET_PATHS mutation', (done) => {
const paths = {
- basePath: 'basePath',
- headPath: 'headPath',
baseBlobPath: 'baseBlobPath',
headBlobPath: 'headBlobPath',
reportsPath: 'reportsPath',
@@ -39,11 +43,11 @@ describe('Codequality Reports actions', () => {
});
describe('fetchReports', () => {
+ const endpoint = `${TEST_HOST}/codequality_reports.json`;
let mock;
beforeEach(() => {
- localState.reportsPath = `${TEST_HOST}/codequality_reports.json`;
- localState.basePath = '/base/path';
+ localState.reportsPath = endpoint;
mock = new MockAdapter(axios);
});
@@ -53,7 +57,7 @@ describe('Codequality Reports actions', () => {
describe('on success', () => {
it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
- mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(200, reportIssues);
+ mock.onGet(endpoint).reply(200, reportIssues);
testAction(
actions.fetchReports,
@@ -73,7 +77,7 @@ describe('Codequality Reports actions', () => {
describe('on error', () => {
it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
- mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(500);
+ mock.onGet(endpoint).reply(500);
testAction(
actions.fetchReports,
@@ -86,20 +90,78 @@ describe('Codequality Reports actions', () => {
});
});
- describe('with no base path', () => {
+ describe('when base report is not found', () => {
it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
- localState.basePath = null;
+ const data = { status: STATUS_NOT_FOUND };
+ mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(200, data);
testAction(
actions.fetchReports,
null,
localState,
[{ type: types.REQUEST_REPORTS }],
- [{ type: 'receiveReportsError' }],
+ [{ type: 'receiveReportsError', payload: data }],
done,
);
});
});
+
+ describe('while waiting for report results', () => {
+ it('continues polling until it receives data', (done) => {
+ mock
+ .onGet(endpoint)
+ .replyOnce(204, undefined, pollIntervalHeader)
+ .onGet(endpoint)
+ .reply(200, reportIssues);
+
+ Promise.all([
+ testAction(
+ actions.fetchReports,
+ null,
+ localState,
+ [{ type: types.REQUEST_REPORTS }],
+ [
+ {
+ payload: parsedReportIssues,
+ type: 'receiveReportsSuccess',
+ },
+ ],
+ done,
+ ),
+ axios
+ // wait for initial NO_CONTENT response to be fulfilled
+ .waitForAll()
+ .then(() => {
+ jest.advanceTimersByTime(pollInterval);
+ }),
+ ]).catch(done.fail);
+ });
+
+ it('continues polling until it receives an error', (done) => {
+ mock
+ .onGet(endpoint)
+ .replyOnce(204, undefined, pollIntervalHeader)
+ .onGet(endpoint)
+ .reply(500);
+
+ Promise.all([
+ testAction(
+ actions.fetchReports,
+ null,
+ localState,
+ [{ type: types.REQUEST_REPORTS }],
+ [{ type: 'receiveReportsError', payload: expect.any(Error) }],
+ done,
+ ),
+ axios
+ // wait for initial NO_CONTENT response to be fulfilled
+ .waitForAll()
+ .then(() => {
+ jest.advanceTimersByTime(pollInterval);
+ }),
+ ]).catch(done.fail);
+ });
+ });
});
describe('receiveReportsSuccess', () => {
diff --git a/spec/frontend/reports/codequality_report/store/getters_spec.js b/spec/frontend/reports/codequality_report/store/getters_spec.js
index de025f814ef..0378171084d 100644
--- a/spec/frontend/reports/codequality_report/store/getters_spec.js
+++ b/spec/frontend/reports/codequality_report/store/getters_spec.js
@@ -1,6 +1,6 @@
import createStore from '~/reports/codequality_report/store';
import * as getters from '~/reports/codequality_report/store/getters';
-import { LOADING, ERROR, SUCCESS } from '~/reports/constants';
+import { LOADING, ERROR, SUCCESS, STATUS_NOT_FOUND } from '~/reports/constants';
describe('Codequality reports store getters', () => {
let localState;
@@ -76,10 +76,9 @@ describe('Codequality reports store getters', () => {
});
describe('codequalityPopover', () => {
- describe('when head report is available but base report is not', () => {
+ describe('when base report is not available', () => {
it('returns a popover with a documentation link', () => {
- localState.headPath = 'head.json';
- localState.basePath = undefined;
+ localState.status = STATUS_NOT_FOUND;
localState.helpPath = 'codequality_help.html';
expect(getters.codequalityPopover(localState).title).toEqual(
diff --git a/spec/frontend/reports/codequality_report/store/mutations_spec.js b/spec/frontend/reports/codequality_report/store/mutations_spec.js
index 8bc6bb26c2a..6e14cd7438b 100644
--- a/spec/frontend/reports/codequality_report/store/mutations_spec.js
+++ b/spec/frontend/reports/codequality_report/store/mutations_spec.js
@@ -1,5 +1,6 @@
import createStore from '~/reports/codequality_report/store';
import mutations from '~/reports/codequality_report/store/mutations';
+import { STATUS_NOT_FOUND } from '~/reports/constants';
describe('Codequality Reports mutations', () => {
let localState;
@@ -12,24 +13,18 @@ describe('Codequality Reports mutations', () => {
describe('SET_PATHS', () => {
it('sets paths to given values', () => {
- const basePath = 'base.json';
- const headPath = 'head.json';
const baseBlobPath = 'base/blob/path/';
const headBlobPath = 'head/blob/path/';
const reportsPath = 'reports.json';
const helpPath = 'help.html';
mutations.SET_PATHS(localState, {
- basePath,
- headPath,
baseBlobPath,
headBlobPath,
reportsPath,
helpPath,
});
- expect(localState.basePath).toEqual(basePath);
- expect(localState.headPath).toEqual(headPath);
expect(localState.baseBlobPath).toEqual(baseBlobPath);
expect(localState.headBlobPath).toEqual(headBlobPath);
expect(localState.reportsPath).toEqual(reportsPath);
@@ -58,9 +53,10 @@ describe('Codequality Reports mutations', () => {
expect(localState.hasError).toEqual(false);
});
- it('clears statusReason', () => {
+ it('clears status and statusReason', () => {
mutations.RECEIVE_REPORTS_SUCCESS(localState, {});
+ expect(localState.status).toEqual('');
expect(localState.statusReason).toEqual('');
});
@@ -86,6 +82,13 @@ describe('Codequality Reports mutations', () => {
expect(localState.hasError).toEqual(true);
});
+ it('sets status based on error object', () => {
+ const error = { status: STATUS_NOT_FOUND };
+ mutations.RECEIVE_REPORTS_ERROR(localState, error);
+
+ expect(localState.status).toEqual(error.status);
+ });
+
it('sets statusReason to string from error response data', () => {
const data = { status_reason: 'This merge request does not have codequality reports' };
const error = { response: { data } };
diff --git a/spec/frontend/repository/components/blob_button_group_spec.js b/spec/frontend/repository/components/blob_button_group_spec.js
index a449fd6f06c..f2a3354f204 100644
--- a/spec/frontend/repository/components/blob_button_group_spec.js
+++ b/spec/frontend/repository/components/blob_button_group_spec.js
@@ -12,6 +12,9 @@ const DEFAULT_PROPS = {
replacePath: 'some/replace/path',
deletePath: 'some/delete/path',
emptyRepo: false,
+ projectPath: 'some/project/path',
+ isLocked: false,
+ canLock: true,
};
const DEFAULT_INJECT = {
@@ -43,7 +46,7 @@ describe('BlobButtonGroup component', () => {
const findDeleteBlobModal = () => wrapper.findComponent(DeleteBlobModal);
const findUploadBlobModal = () => wrapper.findComponent(UploadBlobModal);
- const findReplaceButton = () => wrapper.findAll(GlButton).at(0);
+ const findReplaceButton = () => wrapper.find('[data-testid="replace"]');
it('renders component', () => {
createComponent();
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index a83d0a607f2..d462995328b 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -20,6 +20,8 @@ import blobInfoQuery from '~/repository/queries/blob_info.query.graphql';
jest.mock('~/repository/components/blob_viewers');
let wrapper;
+let mockResolver;
+
const simpleMockData = {
name: 'some_file.js',
size: 123,
@@ -37,9 +39,6 @@ const simpleMockData = {
externalStorageUrl: 'some_file.js',
replacePath: 'some_file.js/replace',
deletePath: 'some_file.js/delete',
- canLock: true,
- isLocked: false,
- lockLink: 'some_file.js/lock',
forkPath: 'some_file.js/fork',
simpleViewer: {
fileType: 'text',
@@ -62,6 +61,7 @@ const richMockData = {
const projectMockData = {
userPermissions: {
pushCode: true,
+ downloadCode: true,
},
repository: {
empty: false,
@@ -71,17 +71,28 @@ const projectMockData = {
const localVue = createLocalVue();
const mockAxios = new MockAdapter(axios);
-const createComponentWithApollo = (mockData = {}) => {
+const createComponentWithApollo = (mockData = {}, inject = {}) => {
localVue.use(VueApollo);
const defaultPushCode = projectMockData.userPermissions.pushCode;
+ const defaultDownloadCode = projectMockData.userPermissions.downloadCode;
const defaultEmptyRepo = projectMockData.repository.empty;
- const { blobs, emptyRepo = defaultEmptyRepo, canPushCode = defaultPushCode } = mockData;
-
- const mockResolver = jest.fn().mockResolvedValue({
+ const {
+ blobs,
+ emptyRepo = defaultEmptyRepo,
+ canPushCode = defaultPushCode,
+ canDownloadCode = defaultDownloadCode,
+ pathLocks = [],
+ } = mockData;
+
+ mockResolver = jest.fn().mockResolvedValue({
data: {
project: {
- userPermissions: { pushCode: canPushCode },
+ id: '1234',
+ userPermissions: { pushCode: canPushCode, downloadCode: canDownloadCode },
+ pathLocks: {
+ nodes: pathLocks,
+ },
repository: {
empty: emptyRepo,
blobs: {
@@ -101,6 +112,14 @@ const createComponentWithApollo = (mockData = {}) => {
path: 'some_file.js',
projectPath: 'some/path',
},
+ mixins: [
+ {
+ data: () => ({ ref: 'default-ref' }),
+ },
+ ],
+ provide: {
+ ...inject,
+ },
});
};
@@ -119,6 +138,7 @@ const createFactory = (mountFn) => (
queries: {
project: {
loading,
+ refetch: jest.fn(),
},
},
},
@@ -298,6 +318,7 @@ describe('Blob content viewer component', () => {
expect(findBlobEdit().props()).toMatchObject({
editPath: editBlobPath,
webIdePath: ideEditPath,
+ showEditButton: true,
});
});
@@ -315,10 +336,11 @@ describe('Blob content viewer component', () => {
expect(findBlobEdit().props()).toMatchObject({
editPath: editBlobPath,
webIdePath: ideEditPath,
+ showEditButton: true,
});
});
- it('does not render BlobHeaderEdit button when viewing a binary file', async () => {
+ it('renders BlobHeaderEdit button for binary files', async () => {
fullFactory({
mockData: { blobInfo: richMockData, isBinary: true },
stubs: {
@@ -329,13 +351,36 @@ describe('Blob content viewer component', () => {
await nextTick();
- expect(findBlobEdit().exists()).toBe(false);
+ expect(findBlobEdit().props()).toMatchObject({
+ editPath: editBlobPath,
+ webIdePath: ideEditPath,
+ showEditButton: false,
+ });
+ });
+
+ describe('blob header binary file', () => {
+ it.each([richMockData, { simpleViewer: { fileType: 'download' } }])(
+ 'passes the correct isBinary value when viewing a binary file',
+ async (blobInfo) => {
+ fullFactory({
+ mockData: {
+ blobInfo,
+ isBinary: true,
+ },
+ stubs: { BlobContent: true, BlobReplace: true },
+ });
+
+ await nextTick();
+
+ expect(findBlobHeader().props('isBinary')).toBe(true);
+ },
+ );
});
describe('BlobButtonGroup', () => {
const { name, path, replacePath, webPath } = simpleMockData;
const {
- userPermissions: { pushCode },
+ userPermissions: { pushCode, downloadCode },
repository: { empty },
} = projectMockData;
@@ -345,7 +390,7 @@ describe('Blob content viewer component', () => {
fullFactory({
mockData: {
blobInfo: simpleMockData,
- project: { userPermissions: { pushCode }, repository: { empty } },
+ project: { userPermissions: { pushCode, downloadCode }, repository: { empty } },
},
stubs: {
BlobContent: true,
@@ -361,10 +406,37 @@ describe('Blob content viewer component', () => {
replacePath,
deletePath: webPath,
canPushCode: pushCode,
+ canLock: true,
+ isLocked: false,
emptyRepo: empty,
});
});
+ it.each`
+ canPushCode | canDownloadCode | canLock
+ ${true} | ${true} | ${true}
+ ${false} | ${true} | ${false}
+ ${true} | ${false} | ${false}
+ `('passes the correct lock states', async ({ canPushCode, canDownloadCode, canLock }) => {
+ fullFactory({
+ mockData: {
+ blobInfo: simpleMockData,
+ project: {
+ userPermissions: { pushCode: canPushCode, downloadCode: canDownloadCode },
+ repository: { empty },
+ },
+ },
+ stubs: {
+ BlobContent: true,
+ BlobButtonGroup: true,
+ },
+ });
+
+ await nextTick();
+
+ expect(findBlobButtonGroup().props('canLock')).toBe(canLock);
+ });
+
it('does not render if not logged in', async () => {
window.gon.current_user_id = null;
@@ -382,4 +454,32 @@ describe('Blob content viewer component', () => {
});
});
});
+
+ describe('blob info query', () => {
+ it('is called with originalBranch value if the prop has a value', async () => {
+ const inject = { originalBranch: 'some-branch' };
+ createComponentWithApollo({ blobs: simpleMockData }, inject);
+
+ await waitForPromises();
+
+ expect(mockResolver).toHaveBeenCalledWith(
+ expect.objectContaining({
+ ref: 'some-branch',
+ }),
+ );
+ });
+
+ it('is called with ref value if the originalBranch prop has no value', async () => {
+ const inject = { originalBranch: null };
+ createComponentWithApollo({ blobs: simpleMockData }, inject);
+
+ await waitForPromises();
+
+ expect(mockResolver).toHaveBeenCalledWith(
+ expect.objectContaining({
+ ref: 'default-ref',
+ }),
+ );
+ });
+ });
});
diff --git a/spec/frontend/repository/components/blob_edit_spec.js b/spec/frontend/repository/components/blob_edit_spec.js
index e6e69cd8549..11739674bc9 100644
--- a/spec/frontend/repository/components/blob_edit_spec.js
+++ b/spec/frontend/repository/components/blob_edit_spec.js
@@ -6,6 +6,7 @@ import WebIdeLink from '~/vue_shared/components/web_ide_link.vue';
const DEFAULT_PROPS = {
editPath: 'some_file.js/edit',
webIdePath: 'some_file.js/ide/edit',
+ showEditButton: true,
};
describe('BlobEdit component', () => {
@@ -31,8 +32,8 @@ describe('BlobEdit component', () => {
});
const findButtons = () => wrapper.findAll(GlButton);
- const findEditButton = () => findButtons().at(0);
- const findWebIdeButton = () => findButtons().at(1);
+ const findEditButton = () => wrapper.find('[data-testid="edit"]');
+ const findWebIdeButton = () => wrapper.find('[data-testid="web-ide"]');
const findWebIdeLink = () => wrapper.find(WebIdeLink);
it('renders component', () => {
@@ -77,6 +78,23 @@ describe('BlobEdit component', () => {
editUrl,
webIdeUrl,
isBlob: true,
+ showEditButton: true,
+ });
+ });
+
+ describe('Without Edit button', () => {
+ const showEditButton = false;
+
+ it('renders WebIdeLink component without an edit button', () => {
+ createComponent(true, { showEditButton });
+
+ expect(findWebIdeLink().props()).toMatchObject({ showEditButton });
+ });
+
+ it('does not render an Edit button', () => {
+ createComponent(false, { showEditButton });
+
+ expect(findEditButton().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/repository/components/breadcrumbs_spec.js b/spec/frontend/repository/components/breadcrumbs_spec.js
index 93bfd3d9d32..0733cffe4f4 100644
--- a/spec/frontend/repository/components/breadcrumbs_spec.js
+++ b/spec/frontend/repository/components/breadcrumbs_spec.js
@@ -3,10 +3,14 @@ import { shallowMount, RouterLinkStub } from '@vue/test-utils';
import Breadcrumbs from '~/repository/components/breadcrumbs.vue';
import UploadBlobModal from '~/repository/components/upload_blob_modal.vue';
+const defaultMockRoute = {
+ name: 'blobPath',
+};
+
describe('Repository breadcrumbs component', () => {
let wrapper;
- const factory = (currentPath, extraProps = {}) => {
+ const factory = (currentPath, extraProps = {}, mockRoute = {}) => {
const $apollo = {
queries: {
userPermissions: {
@@ -23,7 +27,13 @@ describe('Repository breadcrumbs component', () => {
stubs: {
RouterLink: RouterLinkStub,
},
- mocks: { $apollo },
+ mocks: {
+ $route: {
+ defaultMockRoute,
+ ...mockRoute,
+ },
+ $apollo,
+ },
});
};
@@ -69,6 +79,21 @@ describe('Repository breadcrumbs component', () => {
expect(wrapper.find(GlDropdown).exists()).toBe(false);
});
+ it.each`
+ routeName | isRendered
+ ${'blobPath'} | ${false}
+ ${'blobPathDecoded'} | ${false}
+ ${'treePath'} | ${true}
+ ${'treePathDecoded'} | ${true}
+ ${'projectRoot'} | ${true}
+ `(
+ 'does render add to tree dropdown $isRendered when route is $routeName',
+ ({ routeName, isRendered }) => {
+ factory('app/assets/javascripts.js', { canCollaborate: true }, { name: routeName });
+ expect(wrapper.find(GlDropdown).exists()).toBe(isRendered);
+ },
+ );
+
it('renders add to tree dropdown when permissions are true', async () => {
factory('/', { canCollaborate: true });
diff --git a/spec/frontend/repository/components/delete_blob_modal_spec.js b/spec/frontend/repository/components/delete_blob_modal_spec.js
index a74e3e6d325..2c62868f391 100644
--- a/spec/frontend/repository/components/delete_blob_modal_spec.js
+++ b/spec/frontend/repository/components/delete_blob_modal_spec.js
@@ -1,5 +1,5 @@
-import { GlFormTextarea, GlModal, GlFormInput, GlToggle } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlFormTextarea, GlModal, GlFormInput, GlToggle, GlForm } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import DeleteBlobModal from '~/repository/components/delete_blob_modal.vue';
@@ -19,17 +19,34 @@ const initialProps = {
describe('DeleteBlobModal', () => {
let wrapper;
- const createComponent = (props = {}) => {
- wrapper = shallowMount(DeleteBlobModal, {
+ const createComponentFactory = (mountFn) => (props = {}) => {
+ wrapper = mountFn(DeleteBlobModal, {
propsData: {
...initialProps,
...props,
},
+ attrs: {
+ static: true,
+ visible: true,
+ },
});
};
+ const createComponent = createComponentFactory(shallowMount);
+ const createFullComponent = createComponentFactory(mount);
+
const findModal = () => wrapper.findComponent(GlModal);
- const findForm = () => wrapper.findComponent({ ref: 'form' });
+ const findForm = () => findModal().findComponent(GlForm);
+ const findCommitTextarea = () => findForm().findComponent(GlFormTextarea);
+ const findTargetInput = () => findForm().findComponent(GlFormInput);
+ const findCommitHint = () => wrapper.find('[data-testid="hint"]');
+
+ const fillForm = async (inputValue = {}) => {
+ const { targetText, commitText } = inputValue;
+
+ await findTargetInput().vm.$emit('input', targetText);
+ await findCommitTextarea().vm.$emit('input', commitText);
+ };
afterEach(() => {
wrapper.destroy();
@@ -58,17 +75,6 @@ describe('DeleteBlobModal', () => {
expect(findForm().attributes('action')).toBe(initialProps.deletePath);
});
- it('submits the form', async () => {
- createComponent();
-
- const submitSpy = jest.spyOn(findForm().element, 'submit');
- findModal().vm.$emit('primary', { preventDefault: () => {} });
- await nextTick();
-
- expect(submitSpy).toHaveBeenCalled();
- submitSpy.mockRestore();
- });
-
it.each`
component | defaultValue | canPushCode | targetBranch | originalBranch | exist
${GlFormTextarea} | ${initialProps.commitMessage} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true}
@@ -127,4 +133,85 @@ describe('DeleteBlobModal', () => {
},
);
});
+
+ describe('hint', () => {
+ const targetText = 'some target branch';
+ const hintText = 'Try to keep the first line under 52 characters and the others under 72.';
+ const charsGenerator = (length) => 'lorem'.repeat(length);
+
+ beforeEach(async () => {
+ createFullComponent();
+ await nextTick();
+ });
+
+ it.each`
+ commitText | exist | desc
+ ${charsGenerator(53)} | ${true} | ${'first line length > 52'}
+ ${`lorem\n${charsGenerator(73)}`} | ${true} | ${'other line length > 72'}
+ ${charsGenerator(52)} | ${true} | ${'other line length = 52'}
+ ${`lorem\n${charsGenerator(72)}`} | ${true} | ${'other line length = 72'}
+ ${`lorem`} | ${false} | ${'first line length < 53'}
+ ${`lorem\nlorem`} | ${false} | ${'other line length < 53'}
+ `('displays hint $exist for $desc', async ({ commitText, exist }) => {
+ await fillForm({ targetText, commitText });
+
+ if (!exist) {
+ expect(findCommitHint().exists()).toBe(false);
+ return;
+ }
+
+ expect(findCommitHint().text()).toBe(hintText);
+ });
+ });
+
+ describe('form submission', () => {
+ let submitSpy;
+
+ beforeEach(async () => {
+ createFullComponent();
+ await nextTick();
+ submitSpy = jest.spyOn(findForm().element, 'submit');
+ });
+
+ afterEach(() => {
+ submitSpy.mockRestore();
+ });
+
+ describe('invalid form', () => {
+ beforeEach(async () => {
+ await fillForm({ targetText: '', commitText: '' });
+ });
+
+ it('disables submit button', async () => {
+ expect(findModal().props('actionPrimary').attributes[0]).toEqual(
+ expect.objectContaining({ disabled: true }),
+ );
+ });
+
+ it('does not submit form', async () => {
+ findModal().vm.$emit('primary', { preventDefault: () => {} });
+ expect(submitSpy).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('valid form', () => {
+ beforeEach(async () => {
+ await fillForm({
+ targetText: 'some valid target branch',
+ commitText: 'some valid commit message',
+ });
+ });
+
+ it('enables submit button', async () => {
+ expect(findModal().props('actionPrimary').attributes[0]).toEqual(
+ expect.objectContaining({ disabled: false }),
+ );
+ });
+
+ it('submits form', async () => {
+ findModal().vm.$emit('primary', { preventDefault: () => {} });
+ expect(submitSpy).toHaveBeenCalled();
+ });
+ });
+ });
});
diff --git a/spec/frontend/runner/runner_list/runner_list_app_spec.js b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
index 54b7d1f1bdb..c1596711be7 100644
--- a/spec/frontend/runner/runner_list/runner_list_app_spec.js
+++ b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
@@ -1,11 +1,12 @@
import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
-import { TEST_HOST } from 'helpers/test_constants';
+import setWindowLocation from 'helpers/set_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import { updateHistory } from '~/lib/utils/url_utility';
+import AdminRunnersApp from '~/runner/admin_runners/admin_runners_app.vue';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerManualSetupHelp from '~/runner/components/runner_manual_setup_help.vue';
@@ -22,7 +23,6 @@ import {
RUNNER_PAGE_SIZE,
} from '~/runner/constants';
import getRunnersQuery from '~/runner/graphql/get_runners.query.graphql';
-import RunnerListApp from '~/runner/runner_list/runner_list_app.vue';
import { captureException } from '~/runner/sentry_utils';
import { runnersData, runnersDataPaginated } from '../mock_data';
@@ -40,10 +40,9 @@ jest.mock('~/lib/utils/url_utility', () => ({
const localVue = createLocalVue();
localVue.use(VueApollo);
-describe('RunnerListApp', () => {
+describe('AdminRunnersApp', () => {
let wrapper;
let mockRunnersQuery;
- let originalLocation;
const findRunnerTypeHelp = () => wrapper.findComponent(RunnerTypeHelp);
const findRunnerManualSetupHelp = () => wrapper.findComponent(RunnerManualSetupHelp);
@@ -54,7 +53,7 @@ describe('RunnerListApp', () => {
const createComponentWithApollo = ({ props = {}, mountFn = shallowMount } = {}) => {
const handlers = [[getRunnersQuery, mockRunnersQuery]];
- wrapper = mountFn(RunnerListApp, {
+ wrapper = mountFn(AdminRunnersApp, {
localVue,
apolloProvider: createMockApollo(handlers),
propsData: {
@@ -65,22 +64,8 @@ describe('RunnerListApp', () => {
});
};
- const setQuery = (query) => {
- window.location.href = `${TEST_HOST}/admin/runners?${query}`;
- window.location.search = query;
- };
-
- beforeAll(() => {
- originalLocation = window.location;
- Object.defineProperty(window, 'location', { writable: true, value: { href: '', search: '' } });
- });
-
- afterAll(() => {
- window.location = originalLocation;
- });
-
beforeEach(async () => {
- setQuery('');
+ setWindowLocation('/admin/runners');
mockRunnersQuery = jest.fn().mockResolvedValue(runnersData);
createComponentWithApollo();
@@ -116,7 +101,7 @@ describe('RunnerListApp', () => {
describe('when a filter is preselected', () => {
beforeEach(async () => {
- setQuery(`?status[]=${STATUS_ACTIVE}&runner_type[]=${INSTANCE_TYPE}&tag[]=tag1`);
+ setWindowLocation(`?status[]=${STATUS_ACTIVE}&runner_type[]=${INSTANCE_TYPE}&tag[]=tag1`);
createComponentWithApollo();
await waitForPromises();
@@ -197,7 +182,7 @@ describe('RunnerListApp', () => {
it('error is reported to sentry', async () => {
expect(captureException).toHaveBeenCalledWith({
error: new Error('Network error: Error!'),
- component: 'RunnerListApp',
+ component: 'AdminRunnersApp',
});
});
diff --git a/spec/frontend/runner/components/runner_registration_token_reset_spec.js b/spec/frontend/runner/components/runner_registration_token_reset_spec.js
index 6dc207e369c..8b360b88417 100644
--- a/spec/frontend/runner/components/runner_registration_token_reset_spec.js
+++ b/spec/frontend/runner/components/runner_registration_token_reset_spec.js
@@ -1,11 +1,12 @@
import { GlButton } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash, { FLASH_TYPES } from '~/flash';
import RunnerRegistrationTokenReset from '~/runner/components/runner_registration_token_reset.vue';
-import { INSTANCE_TYPE } from '~/runner/constants';
+import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';
import runnersRegistrationTokenResetMutation from '~/runner/graphql/runners_registration_token_reset.mutation.graphql';
import { captureException } from '~/runner/sentry_utils';
@@ -23,11 +24,13 @@ describe('RunnerRegistrationTokenReset', () => {
const findButton = () => wrapper.findComponent(GlButton);
- const createComponent = () => {
+ const createComponent = ({ props, provide = {} } = {}) => {
wrapper = shallowMount(RunnerRegistrationTokenReset, {
localVue,
+ provide,
propsData: {
type: INSTANCE_TYPE,
+ ...props,
},
apolloProvider: createMockApollo([
[runnersRegistrationTokenResetMutation, runnersRegistrationTokenResetMutationHandler],
@@ -59,31 +62,47 @@ describe('RunnerRegistrationTokenReset', () => {
});
describe('On click and confirmation', () => {
- beforeEach(async () => {
- window.confirm.mockReturnValueOnce(true);
- await findButton().vm.$emit('click');
- });
+ const mockGroupId = '11';
+ const mockProjectId = '22';
+
+ describe.each`
+ type | provide | expectedInput
+ ${INSTANCE_TYPE} | ${{}} | ${{ type: INSTANCE_TYPE }}
+ ${GROUP_TYPE} | ${{ groupId: mockGroupId }} | ${{ type: GROUP_TYPE, id: `gid://gitlab/Group/${mockGroupId}` }}
+ ${PROJECT_TYPE} | ${{ projectId: mockProjectId }} | ${{ type: PROJECT_TYPE, id: `gid://gitlab/Project/${mockProjectId}` }}
+ `('Resets token of type $type', ({ type, provide, expectedInput }) => {
+ beforeEach(async () => {
+ createComponent({
+ provide,
+ props: { type },
+ });
+
+ window.confirm.mockReturnValueOnce(true);
+ findButton().vm.$emit('click');
+ await waitForPromises();
+ });
- it('resets token', () => {
- expect(runnersRegistrationTokenResetMutationHandler).toHaveBeenCalledTimes(1);
- expect(runnersRegistrationTokenResetMutationHandler).toHaveBeenCalledWith({
- input: { type: INSTANCE_TYPE },
+ it('resets token', () => {
+ expect(runnersRegistrationTokenResetMutationHandler).toHaveBeenCalledTimes(1);
+ expect(runnersRegistrationTokenResetMutationHandler).toHaveBeenCalledWith({
+ input: expectedInput,
+ });
});
- });
- it('emits result', () => {
- expect(wrapper.emitted('tokenReset')).toHaveLength(1);
- expect(wrapper.emitted('tokenReset')[0]).toEqual([mockNewToken]);
- });
+ it('emits result', () => {
+ expect(wrapper.emitted('tokenReset')).toHaveLength(1);
+ expect(wrapper.emitted('tokenReset')[0]).toEqual([mockNewToken]);
+ });
- it('does not show a loading state', () => {
- expect(findButton().props('loading')).toBe(false);
- });
+ it('does not show a loading state', () => {
+ expect(findButton().props('loading')).toBe(false);
+ });
- it('shows confirmation', () => {
- expect(createFlash).toHaveBeenLastCalledWith({
- message: expect.stringContaining('registration token generated'),
- type: FLASH_TYPES.SUCCESS,
+ it('shows confirmation', () => {
+ expect(createFlash).toHaveBeenLastCalledWith({
+ message: expect.stringContaining('registration token generated'),
+ type: FLASH_TYPES.SUCCESS,
+ });
});
});
});
@@ -91,7 +110,8 @@ describe('RunnerRegistrationTokenReset', () => {
describe('On click without confirmation', () => {
beforeEach(async () => {
window.confirm.mockReturnValueOnce(false);
- await findButton().vm.$emit('click');
+ findButton().vm.$emit('click');
+ await waitForPromises();
});
it('does not reset token', () => {
@@ -118,7 +138,7 @@ describe('RunnerRegistrationTokenReset', () => {
runnersRegistrationTokenResetMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
window.confirm.mockReturnValueOnce(true);
- await findButton().vm.$emit('click');
+ findButton().vm.$emit('click');
await waitForPromises();
expect(createFlash).toHaveBeenLastCalledWith({
@@ -144,7 +164,7 @@ describe('RunnerRegistrationTokenReset', () => {
});
window.confirm.mockReturnValueOnce(true);
- await findButton().vm.$emit('click');
+ findButton().vm.$emit('click');
await waitForPromises();
expect(createFlash).toHaveBeenLastCalledWith({
@@ -160,7 +180,8 @@ describe('RunnerRegistrationTokenReset', () => {
describe('Immediately after click', () => {
it('shows loading state', async () => {
window.confirm.mockReturnValue(true);
- await findButton().vm.$emit('click');
+ findButton().vm.$emit('click');
+ await nextTick();
expect(findButton().props('loading')).toBe(true);
});
diff --git a/spec/frontend/runner/components/runner_type_alert_spec.js b/spec/frontend/runner/components/runner_type_alert_spec.js
index 5b136a77eeb..e54e499743b 100644
--- a/spec/frontend/runner/components/runner_type_alert_spec.js
+++ b/spec/frontend/runner/components/runner_type_alert_spec.js
@@ -23,10 +23,10 @@ describe('RunnerTypeAlert', () => {
});
describe.each`
- type | exampleText | anchor | variant
- ${INSTANCE_TYPE} | ${'Shared runners are available to every project'} | ${'#shared-runners'} | ${'success'}
- ${GROUP_TYPE} | ${'Use Group runners when you want all projects in a group'} | ${'#group-runners'} | ${'success'}
- ${PROJECT_TYPE} | ${'You can set up a specific runner to be used by multiple projects'} | ${'#specific-runners'} | ${'info'}
+ type | exampleText | anchor | variant
+ ${INSTANCE_TYPE} | ${'This runner is available to all groups and projects'} | ${'#shared-runners'} | ${'success'}
+ ${GROUP_TYPE} | ${'This runner is available to all projects and subgroups in a group'} | ${'#group-runners'} | ${'success'}
+ ${PROJECT_TYPE} | ${'This runner is associated with one or more projects'} | ${'#specific-runners'} | ${'info'}
`('When it is an $type level runner', ({ type, exampleText, anchor, variant }) => {
beforeEach(() => {
createComponent({ props: { type } });
diff --git a/spec/frontend/runner/group_runners/group_runners_app_spec.js b/spec/frontend/runner/group_runners/group_runners_app_spec.js
new file mode 100644
index 00000000000..6a0863e92b4
--- /dev/null
+++ b/spec/frontend/runner/group_runners/group_runners_app_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+import RunnerManualSetupHelp from '~/runner/components/runner_manual_setup_help.vue';
+import RunnerTypeHelp from '~/runner/components/runner_type_help.vue';
+import GroupRunnersApp from '~/runner/group_runners/group_runners_app.vue';
+
+const mockRegistrationToken = 'AABBCC';
+
+describe('GroupRunnersApp', () => {
+ let wrapper;
+
+ const findRunnerTypeHelp = () => wrapper.findComponent(RunnerTypeHelp);
+ const findRunnerManualSetupHelp = () => wrapper.findComponent(RunnerManualSetupHelp);
+
+ const createComponent = ({ mountFn = shallowMount } = {}) => {
+ wrapper = mountFn(GroupRunnersApp, {
+ propsData: {
+ registrationToken: mockRegistrationToken,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows the runner type help', () => {
+ expect(findRunnerTypeHelp().exists()).toBe(true);
+ });
+
+ it('shows the runner setup instructions', () => {
+ expect(findRunnerManualSetupHelp().exists()).toBe(true);
+ expect(findRunnerManualSetupHelp().props('registrationToken')).toBe(mockRegistrationToken);
+ });
+});
diff --git a/spec/frontend/runner/runner_list/runner_search_utils_spec.js b/spec/frontend/runner/runner_search_utils_spec.js
index e7969676549..3a0c3abe7bd 100644
--- a/spec/frontend/runner/runner_list/runner_search_utils_spec.js
+++ b/spec/frontend/runner/runner_search_utils_spec.js
@@ -3,7 +3,7 @@ import {
fromUrlQueryToSearch,
fromSearchToUrl,
fromSearchToVariables,
-} from '~/runner/runner_list/runner_search_utils';
+} from '~/runner/runner_search_utils';
describe('search_params.js', () => {
const examples = [
diff --git a/spec/frontend/search/index_spec.js b/spec/frontend/search/index_spec.js
index 1992a7f4437..c07cd74b456 100644
--- a/spec/frontend/search/index_spec.js
+++ b/spec/frontend/search/index_spec.js
@@ -1,4 +1,5 @@
import setHighlightClass from 'ee_else_ce/search/highlight_blob_search_result';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { initSearchApp } from '~/search';
import createStore from '~/search/store';
@@ -8,25 +9,6 @@ jest.mock('~/search/sidebar');
jest.mock('ee_else_ce/search/highlight_blob_search_result');
describe('initSearchApp', () => {
- let defaultLocation;
-
- const setUrl = (query) => {
- window.location.href = `https://localhost:3000/search${query}`;
- window.location.search = query;
- };
-
- beforeEach(() => {
- defaultLocation = window.location;
- Object.defineProperty(window, 'location', {
- writable: true,
- value: { href: '', search: '' },
- });
- });
-
- afterEach(() => {
- window.location = defaultLocation;
- });
-
describe.each`
search | decodedSearch
${'test'} | ${'test'}
@@ -38,7 +20,7 @@ describe('initSearchApp', () => {
${'test+%2520+this+%2520+stuff'} | ${'test %20 this %20 stuff'}
`('parameter decoding', ({ search, decodedSearch }) => {
beforeEach(() => {
- setUrl(`?search=${search}`);
+ setWindowLocation(`/search?search=${search}`);
initSearchApp();
});
diff --git a/spec/frontend/search/mock_data.js b/spec/frontend/search/mock_data.js
index 24ce45e8a09..0542e96c77c 100644
--- a/spec/frontend/search/mock_data.js
+++ b/spec/frontend/search/mock_data.js
@@ -86,18 +86,21 @@ export const STALE_STORED_DATA = [
export const MOCK_FRESH_DATA_RES = { name: 'fresh' };
-export const PROMISE_ALL_EXPECTED_MUTATIONS = {
- initGroups: {
+export const PRELOAD_EXPECTED_MUTATIONS = [
+ {
type: types.LOAD_FREQUENT_ITEMS,
payload: { key: GROUPS_LOCAL_STORAGE_KEY, data: FRESH_STORED_DATA },
},
- resGroups: {
+ {
type: types.LOAD_FREQUENT_ITEMS,
- payload: { key: GROUPS_LOCAL_STORAGE_KEY, data: [MOCK_FRESH_DATA_RES, MOCK_FRESH_DATA_RES] },
+ payload: { key: PROJECTS_LOCAL_STORAGE_KEY, data: FRESH_STORED_DATA },
},
- initProjects: {
+];
+
+export const PROMISE_ALL_EXPECTED_MUTATIONS = {
+ resGroups: {
type: types.LOAD_FREQUENT_ITEMS,
- payload: { key: PROJECTS_LOCAL_STORAGE_KEY, data: FRESH_STORED_DATA },
+ payload: { key: GROUPS_LOCAL_STORAGE_KEY, data: [MOCK_FRESH_DATA_RES, MOCK_FRESH_DATA_RES] },
},
resProjects: {
type: types.LOAD_FREQUENT_ITEMS,
diff --git a/spec/frontend/search/store/actions_spec.js b/spec/frontend/search/store/actions_spec.js
index 3755f8ffae7..9f8c83f2873 100644
--- a/spec/frontend/search/store/actions_spec.js
+++ b/spec/frontend/search/store/actions_spec.js
@@ -17,6 +17,7 @@ import {
MOCK_GROUP,
FRESH_STORED_DATA,
MOCK_FRESH_DATA_RES,
+ PRELOAD_EXPECTED_MUTATIONS,
PROMISE_ALL_EXPECTED_MUTATIONS,
} from '../mock_data';
@@ -68,31 +69,31 @@ describe('Global Search Store Actions', () => {
});
describe.each`
- action | axiosMock | type | expectedMutations | flashCallCount | lsKey
- ${actions.loadFrequentGroups} | ${{ method: 'onGet', code: 200 }} | ${'success'} | ${[PROMISE_ALL_EXPECTED_MUTATIONS.initGroups, PROMISE_ALL_EXPECTED_MUTATIONS.resGroups]} | ${0} | ${GROUPS_LOCAL_STORAGE_KEY}
- ${actions.loadFrequentGroups} | ${{ method: 'onGet', code: 500 }} | ${'error'} | ${[PROMISE_ALL_EXPECTED_MUTATIONS.initGroups]} | ${1} | ${GROUPS_LOCAL_STORAGE_KEY}
- ${actions.loadFrequentProjects} | ${{ method: 'onGet', code: 200 }} | ${'success'} | ${[PROMISE_ALL_EXPECTED_MUTATIONS.initProjects, PROMISE_ALL_EXPECTED_MUTATIONS.resProjects]} | ${0} | ${PROJECTS_LOCAL_STORAGE_KEY}
- ${actions.loadFrequentProjects} | ${{ method: 'onGet', code: 500 }} | ${'error'} | ${[PROMISE_ALL_EXPECTED_MUTATIONS.initProjects]} | ${1} | ${PROJECTS_LOCAL_STORAGE_KEY}
- `(
- 'Promise.all calls',
- ({ action, axiosMock, type, expectedMutations, flashCallCount, lsKey }) => {
- describe(action.name, () => {
- describe(`on ${type}`, () => {
- beforeEach(() => {
- storeUtils.loadDataFromLS = jest.fn().mockReturnValue(FRESH_STORED_DATA);
- mock[axiosMock.method]().reply(axiosMock.code, MOCK_FRESH_DATA_RES);
- });
+ action | axiosMock | type | expectedMutations | flashCallCount
+ ${actions.loadFrequentGroups} | ${{ method: 'onGet', code: 200 }} | ${'success'} | ${[PROMISE_ALL_EXPECTED_MUTATIONS.resGroups]} | ${0}
+ ${actions.loadFrequentGroups} | ${{ method: 'onGet', code: 500 }} | ${'error'} | ${[]} | ${1}
+ ${actions.loadFrequentProjects} | ${{ method: 'onGet', code: 200 }} | ${'success'} | ${[PROMISE_ALL_EXPECTED_MUTATIONS.resProjects]} | ${0}
+ ${actions.loadFrequentProjects} | ${{ method: 'onGet', code: 500 }} | ${'error'} | ${[]} | ${1}
+ `('Promise.all calls', ({ action, axiosMock, type, expectedMutations, flashCallCount }) => {
+ describe(action.name, () => {
+ describe(`on ${type}`, () => {
+ beforeEach(() => {
+ state.frequentItems = {
+ [GROUPS_LOCAL_STORAGE_KEY]: FRESH_STORED_DATA,
+ [PROJECTS_LOCAL_STORAGE_KEY]: FRESH_STORED_DATA,
+ };
+
+ mock[axiosMock.method]().reply(axiosMock.code, MOCK_FRESH_DATA_RES);
+ });
- it(`should dispatch the correct mutations`, () => {
- return testAction({ action, state, expectedMutations }).then(() => {
- expect(storeUtils.loadDataFromLS).toHaveBeenCalledWith(lsKey);
- flashCallback(flashCallCount);
- });
+ it(`should dispatch the correct mutations`, () => {
+ return testAction({ action, state, expectedMutations }).then(() => {
+ flashCallback(flashCallCount);
});
});
});
- },
- );
+ });
+ });
describe('getGroupsData', () => {
const mockCommit = () => {};
@@ -182,14 +183,38 @@ describe('Global Search Store Actions', () => {
});
});
+ describe('preloadStoredFrequentItems', () => {
+ beforeEach(() => {
+ storeUtils.loadDataFromLS = jest.fn().mockReturnValue(FRESH_STORED_DATA);
+ });
+
+ it('calls preloadStoredFrequentItems for both groups and projects and commits LOAD_FREQUENT_ITEMS', async () => {
+ await testAction({
+ action: actions.preloadStoredFrequentItems,
+ state,
+ expectedMutations: PRELOAD_EXPECTED_MUTATIONS,
+ });
+
+ expect(storeUtils.loadDataFromLS).toHaveBeenCalledTimes(2);
+ expect(storeUtils.loadDataFromLS).toHaveBeenCalledWith(GROUPS_LOCAL_STORAGE_KEY);
+ expect(storeUtils.loadDataFromLS).toHaveBeenCalledWith(PROJECTS_LOCAL_STORAGE_KEY);
+ });
+ });
+
describe('setFrequentGroup', () => {
beforeEach(() => {
- storeUtils.setFrequentItemToLS = jest.fn();
+ storeUtils.setFrequentItemToLS = jest.fn().mockReturnValue(FRESH_STORED_DATA);
});
- it(`calls setFrequentItemToLS with ${GROUPS_LOCAL_STORAGE_KEY} and item data`, async () => {
+ it(`calls setFrequentItemToLS with ${GROUPS_LOCAL_STORAGE_KEY} and item data then commits LOAD_FREQUENT_ITEMS`, async () => {
await testAction({
action: actions.setFrequentGroup,
+ expectedMutations: [
+ {
+ type: types.LOAD_FREQUENT_ITEMS,
+ payload: { key: GROUPS_LOCAL_STORAGE_KEY, data: FRESH_STORED_DATA },
+ },
+ ],
payload: MOCK_GROUP,
state,
});
@@ -204,12 +229,18 @@ describe('Global Search Store Actions', () => {
describe('setFrequentProject', () => {
beforeEach(() => {
- storeUtils.setFrequentItemToLS = jest.fn();
+ storeUtils.setFrequentItemToLS = jest.fn().mockReturnValue(FRESH_STORED_DATA);
});
it(`calls setFrequentItemToLS with ${PROJECTS_LOCAL_STORAGE_KEY} and item data`, async () => {
await testAction({
action: actions.setFrequentProject,
+ expectedMutations: [
+ {
+ type: types.LOAD_FREQUENT_ITEMS,
+ payload: { key: PROJECTS_LOCAL_STORAGE_KEY, data: FRESH_STORED_DATA },
+ },
+ ],
payload: MOCK_PROJECT,
state,
});
diff --git a/spec/frontend/search/store/utils_spec.js b/spec/frontend/search/store/utils_spec.js
index 5055fa2cc3d..cd7f7dc3b5f 100644
--- a/spec/frontend/search/store/utils_spec.js
+++ b/spec/frontend/search/store/utils_spec.js
@@ -51,19 +51,25 @@ describe('Global Search Store Utils', () => {
describe('setFrequentItemToLS', () => {
const frequentItems = {};
+ let res;
describe('with existing data', () => {
describe(`when frequency is less than ${MAX_FREQUENCY}`, () => {
beforeEach(() => {
frequentItems[MOCK_LS_KEY] = [{ ...MOCK_GROUPS[0], frequency: 1, lastUsed: PREV_TIME }];
- setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
+ res = setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
});
- it('adds 1 to the frequency, tracks lastUsed, and calls localStorage.setItem', () => {
+ it('adds 1 to the frequency, tracks lastUsed, calls localStorage.setItem and returns the array', () => {
+ const updatedFrequentItems = [
+ { ...MOCK_GROUPS[0], frequency: 2, lastUsed: CURRENT_TIME },
+ ];
+
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
- JSON.stringify([{ ...MOCK_GROUPS[0], frequency: 2, lastUsed: CURRENT_TIME }]),
+ JSON.stringify(updatedFrequentItems),
);
+ expect(res).toEqual(updatedFrequentItems);
});
});
@@ -72,16 +78,19 @@ describe('Global Search Store Utils', () => {
frequentItems[MOCK_LS_KEY] = [
{ ...MOCK_GROUPS[0], frequency: MAX_FREQUENCY, lastUsed: PREV_TIME },
];
- setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
+ res = setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
});
- it(`does not further increase frequency past ${MAX_FREQUENCY}, tracks lastUsed, and calls localStorage.setItem`, () => {
+ it(`does not further increase frequency past ${MAX_FREQUENCY}, tracks lastUsed, calls localStorage.setItem, and returns the array`, () => {
+ const updatedFrequentItems = [
+ { ...MOCK_GROUPS[0], frequency: MAX_FREQUENCY, lastUsed: CURRENT_TIME },
+ ];
+
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
- JSON.stringify([
- { ...MOCK_GROUPS[0], frequency: MAX_FREQUENCY, lastUsed: CURRENT_TIME },
- ]),
+ JSON.stringify(updatedFrequentItems),
);
+ expect(res).toEqual(updatedFrequentItems);
});
});
});
@@ -89,14 +98,17 @@ describe('Global Search Store Utils', () => {
describe('with no existing data', () => {
beforeEach(() => {
frequentItems[MOCK_LS_KEY] = [];
- setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
+ res = setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
});
- it('adds a new entry with frequency 1, tracks lastUsed, and calls localStorage.setItem', () => {
+ it('adds a new entry with frequency 1, tracks lastUsed, calls localStorage.setItem, and returns the array', () => {
+ const updatedFrequentItems = [{ ...MOCK_GROUPS[0], frequency: 1, lastUsed: CURRENT_TIME }];
+
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
- JSON.stringify([{ ...MOCK_GROUPS[0], frequency: 1, lastUsed: CURRENT_TIME }]),
+ JSON.stringify(updatedFrequentItems),
);
+ expect(res).toEqual(updatedFrequentItems);
});
});
@@ -107,18 +119,21 @@ describe('Global Search Store Utils', () => {
{ id: 2, frequency: 1, lastUsed: PREV_TIME },
{ id: 3, frequency: 1, lastUsed: PREV_TIME },
];
- setFrequentItemToLS(MOCK_LS_KEY, frequentItems, { id: 3 });
+ res = setFrequentItemToLS(MOCK_LS_KEY, frequentItems, { id: 3 });
});
- it('sorts the array by most frequent and lastUsed', () => {
+ it('sorts the array by most frequent and lastUsed and returns the array', () => {
+ const updatedFrequentItems = [
+ { id: 3, frequency: 2, lastUsed: CURRENT_TIME },
+ { id: 1, frequency: 2, lastUsed: PREV_TIME },
+ { id: 2, frequency: 1, lastUsed: PREV_TIME },
+ ];
+
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
- JSON.stringify([
- { id: 3, frequency: 2, lastUsed: CURRENT_TIME },
- { id: 1, frequency: 2, lastUsed: PREV_TIME },
- { id: 2, frequency: 1, lastUsed: PREV_TIME },
- ]),
+ JSON.stringify(updatedFrequentItems),
);
+ expect(res).toEqual(updatedFrequentItems);
});
});
@@ -131,31 +146,35 @@ describe('Global Search Store Utils', () => {
{ id: 4, frequency: 2, lastUsed: PREV_TIME },
{ id: 5, frequency: 1, lastUsed: PREV_TIME },
];
- setFrequentItemToLS(MOCK_LS_KEY, frequentItems, { id: 6 });
+ res = setFrequentItemToLS(MOCK_LS_KEY, frequentItems, { id: 6 });
});
- it('removes the last item in the array', () => {
+ it('removes the last item in the array and returns the array', () => {
+ const updatedFrequentItems = [
+ { id: 1, frequency: 5, lastUsed: PREV_TIME },
+ { id: 2, frequency: 4, lastUsed: PREV_TIME },
+ { id: 3, frequency: 3, lastUsed: PREV_TIME },
+ { id: 4, frequency: 2, lastUsed: PREV_TIME },
+ { id: 6, frequency: 1, lastUsed: CURRENT_TIME },
+ ];
+
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
- JSON.stringify([
- { id: 1, frequency: 5, lastUsed: PREV_TIME },
- { id: 2, frequency: 4, lastUsed: PREV_TIME },
- { id: 3, frequency: 3, lastUsed: PREV_TIME },
- { id: 4, frequency: 2, lastUsed: PREV_TIME },
- { id: 6, frequency: 1, lastUsed: CURRENT_TIME },
- ]),
+ JSON.stringify(updatedFrequentItems),
);
+ expect(res).toEqual(updatedFrequentItems);
});
});
describe('with null data loaded in', () => {
beforeEach(() => {
frequentItems[MOCK_LS_KEY] = null;
- setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
+ res = setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
});
- it('wipes local storage', () => {
+ it('wipes local storage and returns empty array', () => {
expect(localStorage.removeItem).toHaveBeenCalledWith(MOCK_LS_KEY);
+ expect(res).toEqual([]);
});
});
@@ -163,14 +182,17 @@ describe('Global Search Store Utils', () => {
beforeEach(() => {
const MOCK_ADDITIONAL_DATA_GROUP = { ...MOCK_GROUPS[0], extraData: 'test' };
frequentItems[MOCK_LS_KEY] = [];
- setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_ADDITIONAL_DATA_GROUP);
+ res = setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_ADDITIONAL_DATA_GROUP);
});
- it('parses out extra data for LS', () => {
+ it('parses out extra data for LS and returns the array', () => {
+ const updatedFrequentItems = [{ ...MOCK_GROUPS[0], frequency: 1, lastUsed: CURRENT_TIME }];
+
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
- JSON.stringify([{ ...MOCK_GROUPS[0], frequency: 1, lastUsed: CURRENT_TIME }]),
+ JSON.stringify(updatedFrequentItems),
);
+ expect(res).toEqual(updatedFrequentItems);
});
});
});
diff --git a/spec/frontend/search/topbar/components/app_spec.js b/spec/frontend/search/topbar/components/app_spec.js
index fb953f2ed1b..7ce5efb3c52 100644
--- a/spec/frontend/search/topbar/components/app_spec.js
+++ b/spec/frontend/search/topbar/components/app_spec.js
@@ -1,13 +1,13 @@
import { GlForm, GlSearchBoxByType, GlButton } from '@gitlab/ui';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
import Vuex from 'vuex';
import { MOCK_QUERY } from 'jest/search/mock_data';
import GlobalSearchTopbar from '~/search/topbar/components/app.vue';
import GroupFilter from '~/search/topbar/components/group_filter.vue';
import ProjectFilter from '~/search/topbar/components/project_filter.vue';
-const localVue = createLocalVue();
-localVue.use(Vuex);
+Vue.use(Vuex);
describe('GlobalSearchTopbar', () => {
let wrapper;
@@ -15,6 +15,7 @@ describe('GlobalSearchTopbar', () => {
const actionSpies = {
applyQuery: jest.fn(),
setQuery: jest.fn(),
+ preloadStoredFrequentItems: jest.fn(),
};
const createComponent = (initialState) => {
@@ -27,14 +28,12 @@ describe('GlobalSearchTopbar', () => {
});
wrapper = shallowMount(GlobalSearchTopbar, {
- localVue,
store,
});
};
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
const findTopbarForm = () => wrapper.find(GlForm);
@@ -110,4 +109,14 @@ describe('GlobalSearchTopbar', () => {
expect(actionSpies.applyQuery).toHaveBeenCalled();
});
});
+
+ describe('onCreate', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('calls preloadStoredFrequentItems', () => {
+ expect(actionSpies.preloadStoredFrequentItems).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/search/topbar/components/group_filter_spec.js b/spec/frontend/search/topbar/components/group_filter_spec.js
index fbd7ad6bb57..bd173791fee 100644
--- a/spec/frontend/search/topbar/components/group_filter_spec.js
+++ b/spec/frontend/search/topbar/components/group_filter_spec.js
@@ -51,7 +51,6 @@ describe('GroupFilter', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
const findSearchableDropdown = () => wrapper.find(SearchableDropdown);
@@ -89,10 +88,11 @@ describe('GroupFilter', () => {
findSearchableDropdown().vm.$emit('change', ANY_OPTION);
});
- it('calls setUrlParams with group null, project id null, and then calls visitUrl', () => {
+ it('calls setUrlParams with group null, project id null, nav_source null, and then calls visitUrl', () => {
expect(setUrlParams).toHaveBeenCalledWith({
[GROUP_DATA.queryParam]: null,
[PROJECT_DATA.queryParam]: null,
+ nav_source: null,
});
expect(visitUrl).toHaveBeenCalled();
@@ -108,10 +108,11 @@ describe('GroupFilter', () => {
findSearchableDropdown().vm.$emit('change', MOCK_GROUP);
});
- it('calls setUrlParams with group id, project id null, and then calls visitUrl', () => {
+ it('calls setUrlParams with group id, project id null, nav_source null, and then calls visitUrl', () => {
expect(setUrlParams).toHaveBeenCalledWith({
[GROUP_DATA.queryParam]: MOCK_GROUP.id,
[PROJECT_DATA.queryParam]: null,
+ nav_source: null,
});
expect(visitUrl).toHaveBeenCalled();
@@ -156,4 +157,31 @@ describe('GroupFilter', () => {
});
});
});
+
+ describe.each`
+ navSource | initialData | callMethod
+ ${null} | ${null} | ${false}
+ ${null} | ${MOCK_GROUP} | ${false}
+ ${'navbar'} | ${null} | ${false}
+ ${'navbar'} | ${MOCK_GROUP} | ${true}
+ `('onCreate', ({ navSource, initialData, callMethod }) => {
+ describe(`when nav_source is ${navSource} and ${
+ initialData ? 'has' : 'does not have'
+ } an initial group`, () => {
+ beforeEach(() => {
+ createComponent({ query: { ...MOCK_QUERY, nav_source: navSource } }, { initialData });
+ });
+
+ it(`${callMethod ? 'does' : 'does not'} call setFrequentGroup`, () => {
+ if (callMethod) {
+ expect(actionSpies.setFrequentGroup).toHaveBeenCalledWith(
+ expect.any(Object),
+ initialData,
+ );
+ } else {
+ expect(actionSpies.setFrequentGroup).not.toHaveBeenCalled();
+ }
+ });
+ });
+ });
});
diff --git a/spec/frontend/search/topbar/components/project_filter_spec.js b/spec/frontend/search/topbar/components/project_filter_spec.js
index 63b0f882ca4..5afcd281d0c 100644
--- a/spec/frontend/search/topbar/components/project_filter_spec.js
+++ b/spec/frontend/search/topbar/components/project_filter_spec.js
@@ -51,7 +51,6 @@ describe('ProjectFilter', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
const findSearchableDropdown = () => wrapper.find(SearchableDropdown);
@@ -89,9 +88,10 @@ describe('ProjectFilter', () => {
findSearchableDropdown().vm.$emit('change', ANY_OPTION);
});
- it('calls setUrlParams with null, no group id, then calls visitUrl', () => {
+ it('calls setUrlParams with null, no group id, nav_source null, then calls visitUrl', () => {
expect(setUrlParams).toHaveBeenCalledWith({
[PROJECT_DATA.queryParam]: null,
+ nav_source: null,
});
expect(visitUrl).toHaveBeenCalled();
});
@@ -106,10 +106,11 @@ describe('ProjectFilter', () => {
findSearchableDropdown().vm.$emit('change', MOCK_PROJECT);
});
- it('calls setUrlParams with project id, group id, then calls visitUrl', () => {
+ it('calls setUrlParams with project id, group id, nav_source null, then calls visitUrl', () => {
expect(setUrlParams).toHaveBeenCalledWith({
[GROUP_DATA.queryParam]: MOCK_PROJECT.namespace.id,
[PROJECT_DATA.queryParam]: MOCK_PROJECT.id,
+ nav_source: null,
});
expect(visitUrl).toHaveBeenCalled();
});
@@ -157,4 +158,31 @@ describe('ProjectFilter', () => {
});
});
});
+
+ describe.each`
+ navSource | initialData | callMethod
+ ${null} | ${null} | ${false}
+ ${null} | ${MOCK_PROJECT} | ${false}
+ ${'navbar'} | ${null} | ${false}
+ ${'navbar'} | ${MOCK_PROJECT} | ${true}
+ `('onCreate', ({ navSource, initialData, callMethod }) => {
+ describe(`when nav_source is ${navSource} and ${
+ initialData ? 'has' : 'does not have'
+ } an initial project`, () => {
+ beforeEach(() => {
+ createComponent({ query: { ...MOCK_QUERY, nav_source: navSource } }, { initialData });
+ });
+
+ it(`${callMethod ? 'does' : 'does not'} call setFrequentProject`, () => {
+ if (callMethod) {
+ expect(actionSpies.setFrequentProject).toHaveBeenCalledWith(
+ expect.any(Object),
+ initialData,
+ );
+ } else {
+ expect(actionSpies.setFrequentProject).not.toHaveBeenCalled();
+ }
+ });
+ });
+ });
});
diff --git a/spec/frontend/security_configuration/app_spec.js b/spec/frontend/security_configuration/app_spec.js
deleted file mode 100644
index 11d481fb210..00000000000
--- a/spec/frontend/security_configuration/app_spec.js
+++ /dev/null
@@ -1,27 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import App from '~/security_configuration/components/app.vue';
-import ConfigurationTable from '~/security_configuration/components/configuration_table.vue';
-
-describe('App Component', () => {
- let wrapper;
-
- const createComponent = () => {
- wrapper = shallowMount(App, {});
- };
- const findConfigurationTable = () => wrapper.findComponent(ConfigurationTable);
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders correct primary & Secondary Heading', () => {
- createComponent();
- expect(wrapper.text()).toContain('Security Configuration');
- expect(wrapper.text()).toContain('Testing & Compliance');
- });
-
- it('renders ConfigurationTable Component', () => {
- createComponent();
- expect(findConfigurationTable().exists()).toBe(true);
- });
-});
diff --git a/spec/frontend/security_configuration/components/redesigned_app_spec.js b/spec/frontend/security_configuration/components/app_spec.js
index 119a25a77c1..f27f45f2b26 100644
--- a/spec/frontend/security_configuration/components/redesigned_app_spec.js
+++ b/spec/frontend/security_configuration/components/app_spec.js
@@ -1,8 +1,12 @@
import { GlTab } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
+import stubChildren from 'helpers/stub_children';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import SecurityConfigurationApp, { i18n } from '~/security_configuration/components/app.vue';
import AutoDevopsAlert from '~/security_configuration/components/auto_dev_ops_alert.vue';
+import AutoDevopsEnabledAlert from '~/security_configuration/components/auto_dev_ops_enabled_alert.vue';
import {
SAST_NAME,
SAST_SHORT_NAME,
@@ -12,12 +16,10 @@ import {
LICENSE_COMPLIANCE_NAME,
LICENSE_COMPLIANCE_DESCRIPTION,
LICENSE_COMPLIANCE_HELP_PATH,
+ AUTO_DEVOPS_ENABLED_ALERT_DISMISSED_STORAGE_KEY,
} from '~/security_configuration/components/constants';
import FeatureCard from '~/security_configuration/components/feature_card.vue';
-import RedesignedSecurityConfigurationApp, {
- i18n,
-} from '~/security_configuration/components/redesigned_app.vue';
import UpgradeBanner from '~/security_configuration/components/upgrade_banner.vue';
import {
REPORT_TYPE_LICENSE_COMPLIANCE,
@@ -28,8 +30,11 @@ const upgradePath = '/upgrade';
const autoDevopsHelpPagePath = '/autoDevopsHelpPagePath';
const autoDevopsPath = '/autoDevopsPath';
const gitlabCiHistoryPath = 'test/historyPath';
+const projectPath = 'namespace/project';
-describe('redesigned App component', () => {
+useLocalStorageSpy();
+
+describe('App component', () => {
let wrapper;
let userCalloutDismissSpy;
@@ -37,14 +42,20 @@ describe('redesigned App component', () => {
userCalloutDismissSpy = jest.fn();
wrapper = extendedWrapper(
- mount(RedesignedSecurityConfigurationApp, {
+ mount(SecurityConfigurationApp, {
propsData,
provide: {
upgradePath,
autoDevopsHelpPagePath,
autoDevopsPath,
+ projectPath,
},
stubs: {
+ ...stubChildren(SecurityConfigurationApp),
+ GlLink: false,
+ GlSprintf: false,
+ LocalStorageSync: false,
+ SectionLayout: false,
UserCalloutDismisser: makeMockUserCalloutDismisser({
dismiss: userCalloutDismissSpy,
shouldShowCallout,
@@ -83,6 +94,7 @@ describe('redesigned App component', () => {
});
const findUpgradeBanner = () => wrapper.findComponent(UpgradeBanner);
const findAutoDevopsAlert = () => wrapper.findComponent(AutoDevopsAlert);
+ const findAutoDevopsEnabledAlert = () => wrapper.findComponent(AutoDevopsEnabledAlert);
const securityFeaturesMock = [
{
@@ -161,7 +173,7 @@ describe('redesigned App component', () => {
});
});
- describe('autoDevOpsAlert', () => {
+ describe('Auto DevOps hint alert', () => {
describe('given the right props', () => {
beforeEach(() => {
createComponent({
@@ -199,6 +211,76 @@ describe('redesigned App component', () => {
});
});
+ describe('Auto DevOps enabled alert', () => {
+ describe.each`
+ context | autoDevopsEnabled | localStorageValue | shouldRender
+ ${'enabled'} | ${true} | ${null} | ${true}
+ ${'enabled, alert dismissed on other project'} | ${true} | ${['foo/bar']} | ${true}
+ ${'enabled, alert dismissed on this project'} | ${true} | ${[projectPath]} | ${false}
+ ${'not enabled'} | ${false} | ${null} | ${false}
+ `('given Auto DevOps is $context', ({ autoDevopsEnabled, localStorageValue, shouldRender }) => {
+ beforeEach(() => {
+ if (localStorageValue !== null) {
+ window.localStorage.setItem(
+ AUTO_DEVOPS_ENABLED_ALERT_DISMISSED_STORAGE_KEY,
+ JSON.stringify(localStorageValue),
+ );
+ }
+
+ createComponent({
+ augmentedSecurityFeatures: securityFeaturesMock,
+ augmentedComplianceFeatures: complianceFeaturesMock,
+ autoDevopsEnabled,
+ });
+ });
+
+ it(shouldRender ? 'renders' : 'does not render', () => {
+ expect(findAutoDevopsEnabledAlert().exists()).toBe(shouldRender);
+ });
+ });
+
+ describe('dismissing', () => {
+ describe.each`
+ dismissedProjects | expectedWrittenValue
+ ${null} | ${[projectPath]}
+ ${[]} | ${[projectPath]}
+ ${['foo/bar']} | ${['foo/bar', projectPath]}
+ ${[projectPath]} | ${[projectPath]}
+ `(
+ 'given dismissed projects $dismissedProjects',
+ ({ dismissedProjects, expectedWrittenValue }) => {
+ beforeEach(() => {
+ if (dismissedProjects !== null) {
+ window.localStorage.setItem(
+ AUTO_DEVOPS_ENABLED_ALERT_DISMISSED_STORAGE_KEY,
+ JSON.stringify(dismissedProjects),
+ );
+ }
+
+ createComponent({
+ augmentedSecurityFeatures: securityFeaturesMock,
+ augmentedComplianceFeatures: complianceFeaturesMock,
+ autoDevopsEnabled: true,
+ });
+
+ findAutoDevopsEnabledAlert().vm.$emit('dismiss');
+ });
+
+ it('adds current project to localStorage value', () => {
+ expect(window.localStorage.setItem).toHaveBeenLastCalledWith(
+ AUTO_DEVOPS_ENABLED_ALERT_DISMISSED_STORAGE_KEY,
+ JSON.stringify(expectedWrittenValue),
+ );
+ });
+
+ it('hides the alert', () => {
+ expect(findAutoDevopsEnabledAlert().exists()).toBe(false);
+ });
+ },
+ );
+ });
+ });
+
describe('upgrade banner', () => {
const makeAvailable = (available) => (feature) => ({ ...feature, available });
diff --git a/spec/frontend/security_configuration/components/auto_dev_ops_enabled_alert_spec.js b/spec/frontend/security_configuration/components/auto_dev_ops_enabled_alert_spec.js
new file mode 100644
index 00000000000..778fea2896a
--- /dev/null
+++ b/spec/frontend/security_configuration/components/auto_dev_ops_enabled_alert_spec.js
@@ -0,0 +1,46 @@
+import { GlAlert } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import AutoDevopsEnabledAlert from '~/security_configuration/components/auto_dev_ops_enabled_alert.vue';
+
+const autoDevopsHelpPagePath = '/autoDevopsHelpPagePath';
+
+describe('AutoDevopsEnabledAlert component', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = mount(AutoDevopsEnabledAlert, {
+ provide: {
+ autoDevopsHelpPagePath,
+ },
+ });
+ };
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains correct body text', () => {
+ expect(wrapper.text()).toMatchInterpolatedText(AutoDevopsEnabledAlert.i18n.body);
+ });
+
+ it('renders the link correctly', () => {
+ const link = wrapper.find('a[href]');
+
+ expect(link.attributes('href')).toBe(autoDevopsHelpPagePath);
+ expect(link.text()).toBe('Auto DevOps');
+ });
+
+ it('bubbles up dismiss events from the GlAlert', () => {
+ expect(wrapper.emitted('dismiss')).toBe(undefined);
+
+ findAlert().vm.$emit('dismiss');
+
+ expect(wrapper.emitted('dismiss')).toEqual([[]]);
+ });
+});
diff --git a/spec/frontend/security_configuration/components/feature_card_spec.js b/spec/frontend/security_configuration/components/feature_card_spec.js
index 3658dbb5ef2..fdb1d2f86e3 100644
--- a/spec/frontend/security_configuration/components/feature_card_spec.js
+++ b/spec/frontend/security_configuration/components/feature_card_spec.js
@@ -127,25 +127,35 @@ describe('FeatureCard component', () => {
describe('actions', () => {
describe.each`
- context | type | available | configured | configurationPath | canEnableByMergeRequest | action
- ${'unavailable'} | ${REPORT_TYPE_SAST} | ${false} | ${false} | ${null} | ${false} | ${null}
- ${'available'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${null} | ${false} | ${'guide'}
- ${'configured'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${null} | ${false} | ${'guide'}
- ${'available, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${null} | ${true} | ${'create-mr'}
- ${'available, can enable by MR, unknown type'} | ${'foo'} | ${true} | ${false} | ${null} | ${true} | ${'guide'}
- ${'configured, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${null} | ${true} | ${'guide'}
- ${'available with config path'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${'foo'} | ${false} | ${'enable'}
- ${'available with config path, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${'foo'} | ${true} | ${'enable'}
- ${'configured with config path'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${'foo'} | ${false} | ${'configure'}
- ${'configured with config path, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${'foo'} | ${true} | ${'configure'}
+ context | type | available | configured | configurationHelpPath | configurationPath | canEnableByMergeRequest | action
+ ${'unavailable'} | ${REPORT_TYPE_SAST} | ${false} | ${false} | ${'/help'} | ${null} | ${false} | ${null}
+ ${'available, no configurationHelpPath'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${null} | ${null} | ${false} | ${null}
+ ${'available'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${'/help'} | ${null} | ${false} | ${'guide'}
+ ${'configured'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${'/help'} | ${null} | ${false} | ${'guide'}
+ ${'available, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${'/help'} | ${null} | ${true} | ${'create-mr'}
+ ${'available, can enable by MR, unknown type'} | ${'foo'} | ${true} | ${false} | ${'/help'} | ${null} | ${true} | ${'guide'}
+ ${'configured, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${'/help'} | ${null} | ${true} | ${'guide'}
+ ${'available with config path'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${'/help'} | ${'foo'} | ${false} | ${'enable'}
+ ${'available with config path, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${'/help'} | ${'foo'} | ${true} | ${'enable'}
+ ${'configured with config path'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${'/help'} | ${'foo'} | ${false} | ${'configure'}
+ ${'configured with config path, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${'/help'} | ${'foo'} | ${true} | ${'configure'}
`(
'given $context feature',
- ({ type, available, configured, configurationPath, canEnableByMergeRequest, action }) => {
+ ({
+ type,
+ available,
+ configured,
+ configurationHelpPath,
+ configurationPath,
+ canEnableByMergeRequest,
+ action,
+ }) => {
beforeEach(() => {
feature = makeFeature({
type,
available,
configured,
+ configurationHelpPath,
configurationPath,
canEnableByMergeRequest,
});
diff --git a/spec/frontend/security_configuration/components/upgrade_banner_spec.js b/spec/frontend/security_configuration/components/upgrade_banner_spec.js
index cf7945343af..a35fded72fb 100644
--- a/spec/frontend/security_configuration/components/upgrade_banner_spec.js
+++ b/spec/frontend/security_configuration/components/upgrade_banner_spec.js
@@ -43,11 +43,11 @@ describe('UpgradeBanner component', () => {
it('renders the list of benefits', () => {
const wrapperText = wrapper.text();
- expect(wrapperText).toContain('GitLab Ultimate checks your application');
+ expect(wrapperText).toContain('Immediately begin risk analysis and remediation');
expect(wrapperText).toContain('statistics in the merge request');
expect(wrapperText).toContain('statistics across projects');
expect(wrapperText).toContain('Runtime security metrics');
- expect(wrapperText).toContain('risk analysis and remediation');
+ expect(wrapperText).toContain('More scan types, including Container Scanning,');
});
it(`re-emits GlBanner's close event`, () => {
diff --git a/spec/frontend/security_configuration/configuration_table_spec.js b/spec/frontend/security_configuration/configuration_table_spec.js
deleted file mode 100644
index fbd72265c4b..00000000000
--- a/spec/frontend/security_configuration/configuration_table_spec.js
+++ /dev/null
@@ -1,52 +0,0 @@
-import { mount } from '@vue/test-utils';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import ConfigurationTable from '~/security_configuration/components/configuration_table.vue';
-import { scanners, UPGRADE_CTA } from '~/security_configuration/components/constants';
-
-import {
- REPORT_TYPE_SAST,
- REPORT_TYPE_SECRET_DETECTION,
-} from '~/vue_shared/security_reports/constants';
-
-describe('Configuration Table Component', () => {
- let wrapper;
-
- const createComponent = () => {
- wrapper = extendedWrapper(
- mount(ConfigurationTable, {
- provide: {
- projectPath: 'testProjectPath',
- },
- }),
- );
- };
-
- const findHelpLinks = () => wrapper.findAll('[data-testid="help-link"]');
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- beforeEach(() => {
- createComponent();
- });
-
- describe.each(scanners.map((scanner, i) => [scanner, i]))('given scanner %s', (scanner, i) => {
- it('should match strings', () => {
- expect(wrapper.text()).toContain(scanner.name);
- expect(wrapper.text()).toContain(scanner.description);
- if (scanner.type === REPORT_TYPE_SAST) {
- expect(wrapper.findByTestId(scanner.type).text()).toBe('Configure via Merge Request');
- } else if (scanner.type === REPORT_TYPE_SECRET_DETECTION) {
- expect(wrapper.findByTestId(scanner.type).exists()).toBe(false);
- } else {
- expect(wrapper.findByTestId(scanner.type).text()).toMatchInterpolatedText(UPGRADE_CTA);
- }
- });
-
- it('should show expected help link', () => {
- const helpLink = findHelpLinks().at(i);
- expect(helpLink.attributes('href')).toBe(scanner.helpPath);
- });
- });
-});
diff --git a/spec/frontend/security_configuration/upgrade_spec.js b/spec/frontend/security_configuration/upgrade_spec.js
deleted file mode 100644
index 20bb38aa469..00000000000
--- a/spec/frontend/security_configuration/upgrade_spec.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import { mount } from '@vue/test-utils';
-import { UPGRADE_CTA } from '~/security_configuration/components/constants';
-import Upgrade from '~/security_configuration/components/upgrade.vue';
-
-const TEST_URL = 'http://www.example.test';
-let wrapper;
-const createComponent = (componentData = {}) => {
- wrapper = mount(Upgrade, componentData);
-};
-
-afterEach(() => {
- wrapper.destroy();
-});
-
-describe('Upgrade component', () => {
- beforeEach(() => {
- createComponent({ provide: { upgradePath: TEST_URL } });
- });
-
- it('renders correct text in link', () => {
- expect(wrapper.text()).toMatchInterpolatedText(UPGRADE_CTA);
- });
-
- it('renders link with correct default attributes', () => {
- expect(wrapper.find('a').attributes()).toMatchObject({
- href: TEST_URL,
- target: '_blank',
- });
- });
-});
diff --git a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
index 523f4e88985..1a874c3dcd6 100644
--- a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
+++ b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
@@ -30,8 +30,13 @@ exports[`self monitor component When the self monitor project has not been creat
class="js-section-sub-header"
>
- Enable or disable instance self monitoring
-
+ Activate or deactivate instance self monitoring.
+
+ <gl-link-stub
+ href="/help/administration/monitoring/gitlab_self_monitoring_project/index"
+ >
+ Learn more.
+ </gl-link-stub>
</p>
</div>
@@ -42,14 +47,14 @@ exports[`self monitor component When the self monitor project has not been creat
name="self-monitoring-form"
>
<p>
- Enabling this feature creates a project that can be used to monitor the health of your instance.
+ Activate self monitoring to create a project to use to monitor the health of your instance.
</p>
<gl-form-group-stub
labeldescription=""
>
<gl-toggle-stub
- label="Create Project"
+ label="Self monitoring"
labelposition="top"
/>
</gl-form-group-stub>
@@ -62,15 +67,15 @@ exports[`self monitor component When the self monitor project has not been creat
dismisslabel="Close"
modalclass=""
modalid="delete-self-monitor-modal"
- ok-title="Delete project"
+ ok-title="Delete self monitoring project"
ok-variant="danger"
size="md"
- title="Disable self monitoring?"
+ title="Deactivate self monitoring?"
titletag="h4"
>
<div>
- Disabling this feature will delete the self monitoring project. Are you sure you want to delete the project?
+ Deactivating self monitoring deletes the self monitoring project. Are you sure you want to deactivate self monitoring and delete the project?
</div>
</gl-modal-stub>
diff --git a/spec/frontend/self_monitor/components/self_monitor_form_spec.js b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
index e6962e4c453..89ad5a00a14 100644
--- a/spec/frontend/self_monitor/components/self_monitor_form_spec.js
+++ b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
@@ -53,7 +53,7 @@ describe('self monitor component', () => {
wrapper = shallowMount(SelfMonitor, { store });
expect(wrapper.find('.js-section-sub-header').text()).toContain(
- 'Enable or disable instance self monitoring',
+ 'Activate or deactivate instance self monitoring.',
);
});
});
@@ -63,7 +63,7 @@ describe('self monitor component', () => {
wrapper = shallowMount(SelfMonitor, { store });
expect(wrapper.vm.selfMonitoringFormText).toContain(
- 'Enabling this feature creates a project that can be used to monitor the health of your instance.',
+ 'Activate self monitoring to create a project to use to monitor the health of your instance.',
);
});
diff --git a/spec/frontend/self_monitor/store/actions_spec.js b/spec/frontend/self_monitor/store/actions_spec.js
index 29181e15680..6bcb2a713ea 100644
--- a/spec/frontend/self_monitor/store/actions_spec.js
+++ b/spec/frontend/self_monitor/store/actions_spec.js
@@ -134,7 +134,7 @@ describe('self monitor actions', () => {
payload: {
actionName: 'viewSelfMonitorProject',
actionText: 'View project',
- message: 'Self monitoring project has been successfully created.',
+ message: 'Self monitoring project successfully created.',
},
},
{ type: types.SET_SHOW_ALERT, payload: true },
@@ -245,7 +245,7 @@ describe('self monitor actions', () => {
payload: {
actionName: 'createProject',
actionText: 'Undo',
- message: 'Self monitoring project has been successfully deleted.',
+ message: 'Self monitoring project successfully deleted.',
},
},
{ type: types.SET_SHOW_ALERT, payload: true },
diff --git a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
index 5a3a152d201..69f6a6e6e04 100644
--- a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
+++ b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
@@ -81,30 +81,33 @@ describe('AssigneeAvatarLink component', () => {
);
describe.each`
- tooltipHasName | availability | canMerge | expected
- ${true} | ${'Busy'} | ${false} | ${'Root (Busy) (cannot merge)'}
- ${true} | ${'Busy'} | ${true} | ${'Root (Busy)'}
- ${true} | ${''} | ${false} | ${'Root (cannot merge)'}
- ${true} | ${''} | ${true} | ${'Root'}
- ${false} | ${'Busy'} | ${false} | ${'Cannot merge'}
- ${false} | ${'Busy'} | ${true} | ${''}
- ${false} | ${''} | ${false} | ${'Cannot merge'}
- ${false} | ${''} | ${true} | ${''}
+ tooltipHasName | name | availability | canMerge | expected
+ ${true} | ${"Rabbit O'Hare"} | ${''} | ${true} | ${"Rabbit O'Hare"}
+ ${true} | ${"Rabbit O'Hare"} | ${'Busy'} | ${false} | ${"Rabbit O'Hare (Busy) (cannot merge)"}
+ ${true} | ${'Root'} | ${'Busy'} | ${false} | ${'Root (Busy) (cannot merge)'}
+ ${true} | ${'Root'} | ${'Busy'} | ${true} | ${'Root (Busy)'}
+ ${true} | ${'Root'} | ${''} | ${false} | ${'Root (cannot merge)'}
+ ${true} | ${'Root'} | ${''} | ${true} | ${'Root'}
+ ${false} | ${'Root'} | ${'Busy'} | ${false} | ${'Cannot merge'}
+ ${false} | ${'Root'} | ${'Busy'} | ${true} | ${''}
+ ${false} | ${'Root'} | ${''} | ${false} | ${'Cannot merge'}
+ ${false} | ${'Root'} | ${''} | ${true} | ${''}
`(
- "with tooltipHasName=$tooltipHasName and availability='$availability' and canMerge=$canMerge",
- ({ tooltipHasName, availability, canMerge, expected }) => {
+ "with name=$name tooltipHasName=$tooltipHasName and availability='$availability' and canMerge=$canMerge",
+ ({ name, tooltipHasName, availability, canMerge, expected }) => {
beforeEach(() => {
createComponent({
tooltipHasName,
user: {
...userDataMock(),
+ name,
can_merge: canMerge,
availability,
},
});
});
- it('sets tooltip to $expected', () => {
+ it(`sets tooltip to "${expected}"`, () => {
expect(findTooltipText()).toBe(expected);
});
},
diff --git a/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js b/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js
index 747d370e1cf..6116bc68927 100644
--- a/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js
+++ b/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js
@@ -1,5 +1,6 @@
import { GlDropdown, GlDropdownItem, GlLoadingIcon, GlTooltip, GlSprintf } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import { INCIDENT_SEVERITY, ISSUABLE_TYPES } from '~/sidebar/components/severity/constants';
@@ -15,6 +16,7 @@ describe('SidebarSeverity', () => {
const projectPath = 'gitlab-org/gitlab-test';
const iid = '1';
const severity = 'CRITICAL';
+ let canUpdate = true;
function createComponent(props = {}) {
const propsData = {
@@ -25,8 +27,11 @@ describe('SidebarSeverity', () => {
...props,
};
mutate = jest.fn();
- wrapper = shallowMount(SidebarSeverity, {
+ wrapper = shallowMountExtended(SidebarSeverity, {
propsData,
+ provide: {
+ canUpdate,
+ },
mocks: {
$apollo: {
mutate,
@@ -45,22 +50,34 @@ describe('SidebarSeverity', () => {
afterEach(() => {
if (wrapper) {
wrapper.destroy();
- wrapper = null;
}
});
- const findSeverityToken = () => wrapper.findAll(SeverityToken);
- const findEditBtn = () => wrapper.find('[data-testid="editButton"]');
- const findDropdown = () => wrapper.find(GlDropdown);
- const findCriticalSeverityDropdownItem = () => wrapper.find(GlDropdownItem);
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
- const findTooltip = () => wrapper.find(GlTooltip);
+ const findSeverityToken = () => wrapper.findAllComponents(SeverityToken);
+ const findEditBtn = () => wrapper.findByTestId('editButton');
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findCriticalSeverityDropdownItem = () => wrapper.findComponent(GlDropdownItem);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findTooltip = () => wrapper.findComponent(GlTooltip);
const findCollapsedSeverity = () => wrapper.find({ ref: 'severity' });
- it('renders severity widget', () => {
- expect(findEditBtn().exists()).toBe(true);
- expect(findSeverityToken().exists()).toBe(true);
- expect(findDropdown().exists()).toBe(true);
+ describe('Severity widget', () => {
+ it('renders severity dropdown and token', () => {
+ expect(findSeverityToken().exists()).toBe(true);
+ expect(findDropdown().exists()).toBe(true);
+ });
+
+ describe('edit button', () => {
+ it('is rendered when `canUpdate` provided as `true`', () => {
+ expect(findEditBtn().exists()).toBe(true);
+ });
+
+ it('is NOT rendered when `canUpdate` provided as `false`', () => {
+ canUpdate = false;
+ createComponent();
+ expect(findEditBtn().exists()).toBe(false);
+ });
+ });
});
describe('Update severity', () => {
@@ -100,7 +117,7 @@ describe('SidebarSeverity', () => {
);
findCriticalSeverityDropdownItem().vm.$emit('click');
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findLoadingIcon().exists()).toBe(true);
resolvePromise();
@@ -128,27 +145,29 @@ describe('SidebarSeverity', () => {
it('should expand the dropdown on collapsed icon click', async () => {
wrapper.vm.isDropdownShowing = false;
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findDropdown().classes(HIDDDEN_CLASS)).toBe(true);
findCollapsedSeverity().trigger('click');
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findDropdown().classes(SHOWN_CLASS)).toBe(true);
});
});
describe('expanded', () => {
it('toggles dropdown with edit button', async () => {
+ canUpdate = true;
+ createComponent();
wrapper.vm.isDropdownShowing = false;
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findDropdown().classes(HIDDDEN_CLASS)).toBe(true);
findEditBtn().vm.$emit('click');
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findDropdown().classes(SHOWN_CLASS)).toBe(true);
findEditBtn().vm.$emit('click');
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findDropdown().classes(HIDDDEN_CLASS)).toBe(true);
});
});
diff --git a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
index f5e5ab4a984..ca6e5ac5e7f 100644
--- a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
+++ b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
@@ -12,11 +12,13 @@ import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { IssuableType } from '~/issue_show/constants';
+import { timeFor } from '~/lib/utils/datetime_utility';
import SidebarDropdownWidget from '~/sidebar/components/sidebar_dropdown_widget.vue';
import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
import { IssuableAttributeType } from '~/sidebar/constants';
@@ -54,6 +56,7 @@ describe('SidebarDropdownWidget', () => {
const mutationSuccessWithErrors = () => jest.fn().mockResolvedValue({ data: promiseWithErrors });
const findGlLink = () => wrapper.findComponent(GlLink);
+ const findDateTooltip = () => getBinding(findGlLink().element, 'gl-tooltip');
const findDropdown = () => wrapper.findComponent(GlDropdown);
const findDropdownText = () => wrapper.findComponent(GlDropdownText);
const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
@@ -155,6 +158,9 @@ describe('SidebarDropdownWidget', () => {
},
},
},
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
stubs: {
SidebarEditableItem,
GlSearchBoxByType,
@@ -177,7 +183,7 @@ describe('SidebarDropdownWidget', () => {
beforeEach(() => {
createComponent({
data: {
- currentAttribute: { id: 'id', title: 'title', webUrl: 'webUrl' },
+ currentAttribute: { id: 'id', title: 'title', webUrl: 'webUrl', dueDate: '2021-09-09' },
},
stubs: {
GlDropdown,
@@ -223,6 +229,10 @@ describe('SidebarDropdownWidget', () => {
expect(findSelectedAttribute().text()).toBe('Some milestone title');
});
+ it('displays time for milestone due date in tooltip', () => {
+ expect(findDateTooltip().value).toBe(timeFor('2021-09-09'));
+ });
+
describe('when current attribute does not exist', () => {
it('renders "None" as the selected attribute title', () => {
createComponent();
@@ -451,7 +461,6 @@ describe('SidebarDropdownWidget', () => {
expect(projectMilestonesSpy).toHaveBeenNthCalledWith(1, {
fullPath: mockIssue.projectPath,
- sort: null,
state: 'active',
title: '',
});
@@ -478,7 +487,6 @@ describe('SidebarDropdownWidget', () => {
expect(projectMilestonesSpy).toHaveBeenNthCalledWith(2, {
fullPath: mockIssue.projectPath,
- sort: null,
state: 'active',
title: mockSearchTerm,
});
diff --git a/spec/frontend/sidebar/components/time_tracking/mock_data.js b/spec/frontend/sidebar/components/time_tracking/mock_data.js
index 862bcbe861e..938750bd58b 100644
--- a/spec/frontend/sidebar/components/time_tracking/mock_data.js
+++ b/spec/frontend/sidebar/components/time_tracking/mock_data.js
@@ -16,9 +16,10 @@ export const getIssueTimelogsQueryResponse = {
},
spentAt: '2020-05-01T00:00:00Z',
note: {
- body: 'I paired with @root on this last week.',
+ body: 'A note',
__typename: 'Note',
},
+ summary: 'A summary',
},
{
__typename: 'Timelog',
@@ -29,6 +30,7 @@ export const getIssueTimelogsQueryResponse = {
},
spentAt: '2021-05-07T13:19:01Z',
note: null,
+ summary: 'A summary',
},
{
__typename: 'Timelog',
@@ -39,9 +41,10 @@ export const getIssueTimelogsQueryResponse = {
},
spentAt: '2021-05-01T00:00:00Z',
note: {
- body: 'I did some work on this last week.',
+ body: 'A note',
__typename: 'Note',
},
+ summary: null,
},
],
__typename: 'TimelogConnection',
@@ -70,6 +73,7 @@ export const getMrTimelogsQueryResponse = {
body: 'Thirty minutes!',
__typename: 'Note',
},
+ summary: null,
},
{
__typename: 'Timelog',
@@ -80,6 +84,7 @@ export const getMrTimelogsQueryResponse = {
},
spentAt: '2021-05-07T14:44:39Z',
note: null,
+ summary: null,
},
{
__typename: 'Timelog',
@@ -93,6 +98,7 @@ export const getMrTimelogsQueryResponse = {
body: 'A note with some time',
__typename: 'Note',
},
+ summary: null,
},
],
__typename: 'TimelogConnection',
diff --git a/spec/frontend/sidebar/components/time_tracking/report_spec.js b/spec/frontend/sidebar/components/time_tracking/report_spec.js
index 710fae8ddf7..66218626e6b 100644
--- a/spec/frontend/sidebar/components/time_tracking/report_spec.js
+++ b/spec/frontend/sidebar/components/time_tracking/report_spec.js
@@ -74,6 +74,8 @@ describe('Issuable Time Tracking Report', () => {
expect(getAllByRole(wrapper.element, 'row', { name: /John Doe18/i })).toHaveLength(1);
expect(getAllByRole(wrapper.element, 'row', { name: /Administrator/i })).toHaveLength(2);
+ expect(getAllByRole(wrapper.element, 'row', { name: /A note/i })).toHaveLength(1);
+ expect(getAllByRole(wrapper.element, 'row', { name: /A summary/i })).toHaveLength(2);
});
});
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index 9fab24d7518..1ebd3c622ca 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -415,7 +415,7 @@ const mockUser1 = {
status: null,
};
-const mockUser2 = {
+export const mockUser2 = {
id: 'gid://gitlab/User/4',
avatarUrl: '/avatar2',
name: 'rookie',
@@ -452,9 +452,40 @@ export const projectMembersResponse = {
null,
null,
// Remove duplicated entry https://gitlab.com/gitlab-org/gitlab/-/issues/327822
- mockUser1,
- mockUser1,
- mockUser2,
+ { user: mockUser1 },
+ { user: mockUser1 },
+ { user: mockUser2 },
+ {
+ user: {
+ id: 'gid://gitlab/User/2',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/a95e5b71488f4b9d69ce5ff58bfd28d6?s=80\u0026d=identicon',
+ name: 'Jacki Kub',
+ username: 'francina.skiles',
+ webUrl: '/franc',
+ status: {
+ availability: 'BUSY',
+ },
+ },
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const groupMembersResponse = {
+ data: {
+ workspace: {
+ __typename: 'roup',
+ users: {
+ nodes: [
+ // Remove nulls https://gitlab.com/gitlab-org/gitlab/-/issues/329750
+ null,
+ null,
+ // Remove duplicated entry https://gitlab.com/gitlab-org/gitlab/-/issues/327822
+ { user: mockUser1 },
+ { user: mockUser1 },
{
user: {
id: 'gid://gitlab/User/2',
@@ -531,6 +562,7 @@ export const mockMilestone1 = {
webUrl: 'http://gdk.test:3000/groups/gitlab-org/-/milestones/1',
state: 'active',
expired: false,
+ dueDate: '2030-09-09',
};
export const mockMilestone2 = {
@@ -540,6 +572,7 @@ export const mockMilestone2 = {
webUrl: 'http://gdk.test:3000/groups/gitlab-org/-/milestones/2',
state: 'active',
expired: false,
+ dueDate: '2030-09-09',
};
export const mockProjectMilestonesResponse = {
@@ -554,6 +587,19 @@ export const mockProjectMilestonesResponse = {
},
};
+export const mockGroupMilestonesResponse = {
+ data: {
+ workspace: {
+ id: 'gid://gitlab/Group/1',
+ attributes: {
+ nodes: [mockMilestone1, mockMilestone2],
+ },
+ __typename: 'MilestoneConnection',
+ },
+ __typename: 'Group',
+ },
+};
+
export const noCurrentMilestoneResponse = {
data: {
workspace: {
@@ -574,6 +620,7 @@ export const mockMilestoneMutationResponse = {
title: 'Awesome Milestone',
state: 'active',
expired: false,
+ dueDate: '2030-09-09',
__typename: 'Milestone',
},
__typename: 'Issue',
diff --git a/spec/frontend/snippets/components/show_spec.js b/spec/frontend/snippets/components/show_spec.js
index e6162c6aad2..b7b638b5137 100644
--- a/spec/frontend/snippets/components/show_spec.js
+++ b/spec/frontend/snippets/components/show_spec.js
@@ -71,7 +71,9 @@ describe('Snippet view app', () => {
it('renders correct snippet-blob components', () => {
createComponent({
data: {
- blobs: [Blob, BinaryBlob],
+ snippet: {
+ blobs: [Blob, BinaryBlob],
+ },
},
});
const blobs = wrapper.findAll(SnippetBlob);
diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js
index 585614a6b79..fb95be3a77c 100644
--- a/spec/frontend/snippets/components/snippet_header_spec.js
+++ b/spec/frontend/snippets/components/snippet_header_spec.js
@@ -1,6 +1,7 @@
-import { GlButton, GlModal } from '@gitlab/ui';
+import { GlButton, GlModal, GlDropdown } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { ApolloMutation } from 'vue-apollo';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { Blob, BinaryBlob } from 'jest/blob/components/mock_data';
import { differenceInMilliseconds } from '~/lib/utils/datetime_utility';
@@ -16,6 +17,7 @@ describe('Snippet header component', () => {
let errorMsg;
let err;
const originalRelativeUrlRoot = gon.relative_url_root;
+ const reportAbusePath = '/-/snippets/42/mark_as_spam';
const GlEmoji = { template: '<img/>' };
@@ -24,6 +26,7 @@ describe('Snippet header component', () => {
permissions = {},
mutationRes = mutationTypes.RESOLVE,
snippetProps = {},
+ provide = {},
} = {}) {
const defaultProps = Object.assign(snippet, snippetProps);
if (permissions) {
@@ -42,6 +45,10 @@ describe('Snippet header component', () => {
wrapper = mount(SnippetHeader, {
mocks: { $apollo },
+ provide: {
+ reportAbusePath,
+ ...provide,
+ },
propsData: {
snippet: {
...defaultProps,
@@ -54,9 +61,27 @@ describe('Snippet header component', () => {
});
}
- const findAuthorEmoji = () => wrapper.find(GlEmoji);
+ const findAuthorEmoji = () => wrapper.findComponent(GlEmoji);
const findAuthoredMessage = () => wrapper.find('[data-testid="authored-message"]').text();
- const buttonCount = () => wrapper.findAll(GlButton).length;
+ const findButtons = () => wrapper.findAllComponents(GlButton);
+ const findButtonsAsModel = () =>
+ findButtons().wrappers.map((x) => ({
+ text: x.text(),
+ href: x.attributes('href'),
+ category: x.props('category'),
+ variant: x.props('variant'),
+ disabled: x.props('disabled'),
+ }));
+ const findResponsiveDropdown = () => wrapper.findComponent(GlDropdown);
+ // We can't search by component here since we are full mounting and the attributes are applied to a child of the GlDropdownItem
+ const findResponsiveDropdownItems = () => findResponsiveDropdown().findAll('[role="menuitem"]');
+ const findResponsiveDropdownItemsAsModel = () =>
+ findResponsiveDropdownItems().wrappers.map((x) => ({
+ disabled: x.attributes('disabled'),
+ href: x.attributes('href'),
+ title: x.attributes('title'),
+ text: x.text(),
+ }));
beforeEach(() => {
gon.relative_url_root = '/foo/';
@@ -141,42 +166,108 @@ describe('Snippet header component', () => {
expect(text).toBe('Authored 1 month ago');
});
- it('renders action buttons based on permissions', () => {
- createComponent({
- permissions: {
- adminSnippet: false,
- updateSnippet: false,
+ it('renders a action buttons', () => {
+ createComponent();
+
+ expect(findButtonsAsModel()).toEqual([
+ {
+ category: 'primary',
+ disabled: false,
+ href: `${snippet.webUrl}/edit`,
+ text: 'Edit',
+ variant: 'default',
},
- });
- expect(buttonCount()).toEqual(0);
+ {
+ category: 'secondary',
+ disabled: false,
+ text: 'Delete',
+ variant: 'danger',
+ },
+ {
+ category: 'primary',
+ disabled: false,
+ href: reportAbusePath,
+ text: 'Submit as spam',
+ variant: 'default',
+ },
+ ]);
+ });
- createComponent({
- permissions: {
- adminSnippet: true,
- updateSnippet: false,
+ it('renders responsive dropdown for action buttons', () => {
+ createComponent();
+
+ expect(findResponsiveDropdownItemsAsModel()).toEqual([
+ {
+ href: `${snippet.webUrl}/edit`,
+ text: 'Edit',
},
- });
- expect(buttonCount()).toEqual(1);
+ {
+ text: 'Delete',
+ },
+ {
+ href: reportAbusePath,
+ text: 'Submit as spam',
+ title: 'Submit as spam',
+ },
+ ]);
+ });
+ it.each`
+ permissions | buttons
+ ${{ adminSnippet: false, updateSnippet: false }} | ${['Submit as spam']}
+ ${{ adminSnippet: true, updateSnippet: false }} | ${['Delete', 'Submit as spam']}
+ ${{ adminSnippet: false, updateSnippet: true }} | ${['Edit', 'Submit as spam']}
+ `('with permissions ($permissions), renders buttons ($buttons)', ({ permissions, buttons }) => {
createComponent({
permissions: {
- adminSnippet: true,
- updateSnippet: true,
+ ...permissions,
},
});
- expect(buttonCount()).toEqual(2);
- createComponent({
- permissions: {
- adminSnippet: true,
- updateSnippet: true,
- },
+ expect(findButtonsAsModel().map((x) => x.text)).toEqual(buttons);
+ });
+
+ it('with canCreateSnippet permission, renders create button', async () => {
+ createComponent();
+
+ // TODO: we should avoid `wrapper.setData` since they
+ // are component internals. Let's use the apollo mock helpers
+ // in a follow-up.
+ wrapper.setData({ canCreateSnippet: true });
+ await wrapper.vm.$nextTick();
+
+ expect(findButtonsAsModel()).toEqual(
+ expect.arrayContaining([
+ {
+ category: 'secondary',
+ disabled: false,
+ href: `/foo/-/snippets/new`,
+ text: 'New snippet',
+ variant: 'success',
+ },
+ ]),
+ );
+ });
+
+ describe('with guest user', () => {
+ beforeEach(() => {
+ createComponent({
+ permissions: {
+ adminSnippet: false,
+ updateSnippet: false,
+ },
+ provide: {
+ reportAbusePath: null,
+ },
+ });
});
- wrapper.setData({
- canCreateSnippet: true,
+
+ it('does not show any action buttons', () => {
+ expect(findButtons()).toHaveLength(0);
});
- return wrapper.vm.$nextTick().then(() => {
- expect(buttonCount()).toEqual(3);
+
+ it('does not show responsive action dropdown', () => {
+ expect(findResponsiveDropdown().exists()).toBe(false);
});
});
@@ -200,19 +291,6 @@ describe('Snippet header component', () => {
});
describe('Delete mutation', () => {
- const { location } = window;
-
- beforeEach(() => {
- delete window.location;
- window.location = {
- pathname: '',
- };
- });
-
- afterEach(() => {
- window.location = location;
- });
-
it('dispatches a mutation to delete the snippet with correct variables', () => {
createComponent();
wrapper.vm.deleteSnippet();
@@ -231,6 +309,8 @@ describe('Snippet header component', () => {
});
describe('in case of successful mutation, closes modal and redirects to correct listing', () => {
+ useMockLocationHelper();
+
const createDeleteSnippet = (snippetProps = {}) => {
createComponent({
snippetProps,
diff --git a/spec/frontend/syntax_highlight_spec.js b/spec/frontend/syntax_highlight_spec.js
index 418679e7d18..8ad4f8d5c70 100644
--- a/spec/frontend/syntax_highlight_spec.js
+++ b/spec/frontend/syntax_highlight_spec.js
@@ -10,39 +10,50 @@ describe('Syntax Highlighter', () => {
}
return (window.gon.user_color_scheme = value);
};
- describe('on a js-syntax-highlight element', () => {
- beforeEach(() => {
- setFixtures('<div class="js-syntax-highlight"></div>');
- });
-
- it('applies syntax highlighting', () => {
- stubUserColorScheme('monokai');
- syntaxHighlight($('.js-syntax-highlight'));
- expect($('.js-syntax-highlight')).toHaveClass('monokai');
+ // We have to bind `document.querySelectorAll` to `document` to not mess up the fn's context
+ describe.each`
+ desc | fn
+ ${'jquery'} | ${$}
+ ${'vanilla all'} | ${document.querySelectorAll.bind(document)}
+ ${'vanilla single'} | ${document.querySelector.bind(document)}
+ `('highlight using $desc syntax', ({ fn }) => {
+ describe('on a js-syntax-highlight element', () => {
+ beforeEach(() => {
+ setFixtures('<div class="js-syntax-highlight"></div>');
+ });
+
+ it('applies syntax highlighting', () => {
+ stubUserColorScheme('monokai');
+ syntaxHighlight(fn('.js-syntax-highlight'));
+
+ expect(fn('.js-syntax-highlight')).toHaveClass('monokai');
+ });
});
- });
- describe('on a parent element', () => {
- beforeEach(() => {
- setFixtures(
- '<div class="parent">\n <div class="js-syntax-highlight"></div>\n <div class="foo"></div>\n <div class="js-syntax-highlight"></div>\n</div>',
- );
- });
+ describe('on a parent element', () => {
+ beforeEach(() => {
+ setFixtures(
+ '<div class="parent">\n <div class="js-syntax-highlight"></div>\n <div class="foo"></div>\n <div class="js-syntax-highlight"></div>\n</div>',
+ );
+ });
- it('applies highlighting to all applicable children', () => {
- stubUserColorScheme('monokai');
- syntaxHighlight($('.parent'));
+ it('applies highlighting to all applicable children', () => {
+ stubUserColorScheme('monokai');
+ syntaxHighlight(fn('.parent'));
- expect($('.parent, .foo')).not.toHaveClass('monokai');
- expect($('.monokai').length).toBe(2);
- });
+ expect(fn('.parent')).not.toHaveClass('monokai');
+ expect(fn('.foo')).not.toHaveClass('monokai');
+
+ expect(document.querySelectorAll('.monokai').length).toBe(2);
+ });
- it('prevents an infinite loop when no matches exist', () => {
- setFixtures('<div></div>');
- const highlight = () => syntaxHighlight($('div'));
+ it('prevents an infinite loop when no matches exist', () => {
+ setFixtures('<div></div>');
+ const highlight = () => syntaxHighlight(fn('div'));
- expect(highlight).not.toThrow();
+ expect(highlight).not.toThrow();
+ });
});
});
});
diff --git a/spec/frontend/terraform/components/empty_state_spec.js b/spec/frontend/terraform/components/empty_state_spec.js
index c86160e18f3..1637ac2039c 100644
--- a/spec/frontend/terraform/components/empty_state_spec.js
+++ b/spec/frontend/terraform/components/empty_state_spec.js
@@ -1,4 +1,4 @@
-import { GlEmptyState, GlSprintf } from '@gitlab/ui';
+import { GlEmptyState, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import EmptyState from '~/terraform/components/empty_state.vue';
@@ -8,19 +8,20 @@ describe('EmptyStateComponent', () => {
const propsData = {
image: '/image/path',
};
+ const docsUrl = '/help/user/infrastructure/terraform_state';
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findLink = () => wrapper.findComponent(GlLink);
beforeEach(() => {
- wrapper = shallowMount(EmptyState, { propsData, stubs: { GlEmptyState, GlSprintf } });
- return wrapper.vm.$nextTick();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
+ wrapper = shallowMount(EmptyState, { propsData, stubs: { GlEmptyState, GlLink } });
});
it('should render content', () => {
- expect(wrapper.find(GlEmptyState).exists()).toBe(true);
+ expect(findEmptyState().exists()).toBe(true);
expect(wrapper.text()).toContain('Get started with Terraform');
});
+
+ it('should have a link to the GitLab managed Terraform States docs', () => {
+ expect(findLink().attributes('href')).toBe(docsUrl);
+ });
});
diff --git a/spec/frontend/terraform/components/init_command_modal_spec.js b/spec/frontend/terraform/components/init_command_modal_spec.js
new file mode 100644
index 00000000000..dbdff899bac
--- /dev/null
+++ b/spec/frontend/terraform/components/init_command_modal_spec.js
@@ -0,0 +1,79 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import InitCommandModal from '~/terraform/components/init_command_modal.vue';
+import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
+
+const accessTokensPath = '/path/to/access-tokens-page';
+const terraformApiUrl = 'https://gitlab.com/api/v4/projects/1';
+const username = 'username';
+const modalId = 'fake-modal-id';
+const stateName = 'production';
+const modalInfoCopyStr = `export GITLAB_ACCESS_TOKEN=<YOUR-ACCESS-TOKEN>
+terraform init \\
+ -backend-config="address=${terraformApiUrl}/${stateName}" \\
+ -backend-config="lock_address=${terraformApiUrl}/${stateName}/lock" \\
+ -backend-config="unlock_address=${terraformApiUrl}/${stateName}/lock" \\
+ -backend-config="username=${username}" \\
+ -backend-config="password=$GITLAB_ACCESS_TOKEN" \\
+ -backend-config="lock_method=POST" \\
+ -backend-config="unlock_method=DELETE" \\
+ -backend-config="retry_wait_min=5"
+ `;
+
+describe('InitCommandModal', () => {
+ let wrapper;
+
+ const propsData = {
+ modalId,
+ stateName,
+ };
+ const provideData = {
+ accessTokensPath,
+ terraformApiUrl,
+ username,
+ };
+
+ const findExplanatoryText = () => wrapper.findByTestId('init-command-explanatory-text');
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findInitCommand = () => wrapper.findByTestId('terraform-init-command');
+ const findCopyButton = () => wrapper.findComponent(ModalCopyButton);
+
+ beforeEach(() => {
+ wrapper = shallowMountExtended(InitCommandModal, {
+ propsData,
+ provide: provideData,
+ stubs: {
+ GlSprintf,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('on rendering', () => {
+ it('renders the explanatory text', () => {
+ expect(findExplanatoryText().text()).toContain('personal access token');
+ });
+
+ it('renders the personal access token link', () => {
+ expect(findLink().attributes('href')).toBe(accessTokensPath);
+ });
+
+ it('renders the init command with the username and state name prepopulated', () => {
+ expect(findInitCommand().text()).toContain(username);
+ expect(findInitCommand().text()).toContain(stateName);
+ });
+
+ it('renders the copyToClipboard button', () => {
+ expect(findCopyButton().exists()).toBe(true);
+ });
+ });
+
+ describe('when copy button is clicked', () => {
+ it('copies init command to clipboard', () => {
+ expect(findCopyButton().props('text')).toBe(modalInfoCopyStr);
+ });
+ });
+});
diff --git a/spec/frontend/terraform/components/states_table_actions_spec.js b/spec/frontend/terraform/components/states_table_actions_spec.js
index 61f6e9f0f7b..9d28e8ce294 100644
--- a/spec/frontend/terraform/components/states_table_actions_spec.js
+++ b/spec/frontend/terraform/components/states_table_actions_spec.js
@@ -3,6 +3,7 @@ import { createLocalVue, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import InitCommandModal from '~/terraform/components/init_command_modal.vue';
import StateActions from '~/terraform/components/states_table_actions.vue';
import lockStateMutation from '~/terraform/graphql/mutations/lock_state.mutation.graphql';
import removeStateMutation from '~/terraform/graphql/mutations/remove_state.mutation.graphql';
@@ -73,12 +74,14 @@ describe('StatesTableActions', () => {
return wrapper.vm.$nextTick();
};
- const findActionsDropdown = () => wrapper.find(GlDropdown);
+ const findActionsDropdown = () => wrapper.findComponent(GlDropdown);
+ const findCopyBtn = () => wrapper.find('[data-testid="terraform-state-copy-init-command"]');
+ const findCopyModal = () => wrapper.findComponent(InitCommandModal);
const findLockBtn = () => wrapper.find('[data-testid="terraform-state-lock"]');
const findUnlockBtn = () => wrapper.find('[data-testid="terraform-state-unlock"]');
const findDownloadBtn = () => wrapper.find('[data-testid="terraform-state-download"]');
const findRemoveBtn = () => wrapper.find('[data-testid="terraform-state-remove"]');
- const findRemoveModal = () => wrapper.find(GlModal);
+ const findRemoveModal = () => wrapper.findComponent(GlModal);
beforeEach(() => {
return createComponent();
@@ -125,6 +128,25 @@ describe('StatesTableActions', () => {
});
});
+ describe('copy command button', () => {
+ it('displays a copy init command button', () => {
+ expect(findCopyBtn().text()).toBe('Copy Terraform init command');
+ });
+
+ describe('when clicking the copy init command button', () => {
+ beforeEach(() => {
+ findCopyBtn().vm.$emit('click');
+
+ return waitForPromises();
+ });
+
+ it('opens the modal', async () => {
+ expect(findCopyModal().exists()).toBe(true);
+ expect(findCopyModal().isVisible()).toBe(true);
+ });
+ });
+ });
+
describe('download button', () => {
it('displays a download button', () => {
expect(findDownloadBtn().text()).toBe('Download JSON');
@@ -253,7 +275,7 @@ describe('StatesTableActions', () => {
it('displays a remove modal', () => {
expect(findRemoveModal().text()).toContain(
- `You are about to remove the State file ${defaultProps.state.name}`,
+ `You are about to remove the state file ${defaultProps.state.name}`,
);
});
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index 9b95ed6b816..4d1b0f54e42 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -3,6 +3,8 @@ import * as jqueryMatchers from 'custom-jquery-matchers';
import Vue from 'vue';
import 'jquery';
import { setGlobalDateToFakeDate } from 'helpers/fake_date';
+import setWindowLocation from 'helpers/set_window_location_helper';
+import { TEST_HOST } from 'helpers/test_constants';
import Translate from '~/vue_shared/translate';
import { getJSONFixture, loadHTMLFixture, setHTMLFixture } from './__helpers__/fixtures';
import { initializeTestTimeout } from './__helpers__/timeout';
@@ -88,8 +90,13 @@ Object.assign(global, {
},
});
-// make sure that each test actually tests something
-// see https://jestjs.io/docs/en/expect#expecthasassertions
beforeEach(() => {
+ // make sure that each test actually tests something
+ // see https://jestjs.io/docs/en/expect#expecthasassertions
expect.hasAssertions();
+
+ // Reset the mocked window.location. This ensures tests don't interfere with
+ // each other, and removes the need to tidy up if it was changed for a given
+ // test.
+ setWindowLocation(TEST_HOST);
});
diff --git a/spec/frontend/token_access/token_access_spec.js b/spec/frontend/token_access/token_access_spec.js
index c7323eb19fe..c4e29a52f1c 100644
--- a/spec/frontend/token_access/token_access_spec.js
+++ b/spec/frontend/token_access/token_access_spec.js
@@ -1,7 +1,8 @@
import { GlToggle, GlLoadingIcon } from '@gitlab/ui';
-import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
+import { createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import TokenAccess from '~/token_access/components/token_access.vue';
@@ -41,15 +42,15 @@ describe('TokenAccess component', () => {
const findToggle = () => wrapper.findComponent(GlToggle);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findAddProjectBtn = () => wrapper.find('[data-testid="add-project-button"]');
- const findRemoveProjectBtn = () => wrapper.find('[data-testid="remove-project-button"]');
+ const findAddProjectBtn = () => wrapper.findByRole('button', { name: 'Add project' });
+ const findRemoveProjectBtn = () => wrapper.findByRole('button', { name: 'Remove access' });
const findTokenSection = () => wrapper.find('[data-testid="token-section"]');
const createMockApolloProvider = (requestHandlers) => {
return createMockApollo(requestHandlers);
};
- const createComponent = (requestHandlers, mountFn = shallowMount) => {
+ const createComponent = (requestHandlers, mountFn = shallowMountExtended) => {
wrapper = mountFn(TokenAccess, {
localVue,
provide: {
@@ -138,7 +139,7 @@ describe('TokenAccess component', () => {
[getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
[addProjectCIJobTokenScopeMutation, addProjectSuccessHandler],
],
- mount,
+ mountExtended,
);
await waitForPromises();
@@ -160,7 +161,7 @@ describe('TokenAccess component', () => {
[getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
[addProjectCIJobTokenScopeMutation, addProjectFailureHandler],
],
- mount,
+ mountExtended,
);
await waitForPromises();
@@ -181,7 +182,7 @@ describe('TokenAccess component', () => {
[getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
[removeProjectCIJobTokenScopeMutation, removeProjectSuccessHandler],
],
- mount,
+ mountExtended,
);
await waitForPromises();
@@ -203,7 +204,7 @@ describe('TokenAccess component', () => {
[getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
[removeProjectCIJobTokenScopeMutation, removeProjectFailureHandler],
],
- mount,
+ mountExtended,
);
await waitForPromises();
diff --git a/spec/frontend/tooltips/components/tooltips_spec.js b/spec/frontend/tooltips/components/tooltips_spec.js
index c44918ceaf3..9b703b74a1a 100644
--- a/spec/frontend/tooltips/components/tooltips_spec.js
+++ b/spec/frontend/tooltips/components/tooltips_spec.js
@@ -4,7 +4,7 @@ import { useMockMutationObserver } from 'helpers/mock_dom_observer';
import Tooltips from '~/tooltips/components/tooltips.vue';
describe('tooltips/components/tooltips.vue', () => {
- const { trigger: triggerMutate, observersCount } = useMockMutationObserver();
+ const { trigger: triggerMutate } = useMockMutationObserver();
let wrapper;
const buildWrapper = () => {
@@ -211,11 +211,14 @@ describe('tooltips/components/tooltips.vue', () => {
it('disconnects mutation observer on beforeDestroy', () => {
buildWrapper();
wrapper.vm.addTooltips([createTooltipTarget()]);
+ const { observer } = wrapper.vm;
+ jest.spyOn(observer, 'disconnect');
- expect(observersCount()).toBe(1);
+ expect(observer.disconnect).toHaveBeenCalledTimes(0);
wrapper.destroy();
- expect(observersCount()).toBe(0);
+
+ expect(observer.disconnect).toHaveBeenCalledTimes(1);
});
it('exposes hidden event', async () => {
diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking_spec.js
index 13498cfb823..a17efdd61a9 100644
--- a/spec/frontend/tracking_spec.js
+++ b/spec/frontend/tracking_spec.js
@@ -387,11 +387,13 @@ describe('Tracking', () => {
beforeEach(() => {
eventSpy = jest.spyOn(Tracking, 'event');
setHTMLFixture(`
- <input data-track-${term}="render" data-track-label="label1" value=1 data-track-property="_property_"/>
- <span data-track-${term}="render" data-track-label="label2" data-track-value=1>
- Something
- </span>
- <input data-track-${term}="_render_bogus_" data-track-label="label3" value="_value_" data-track-property="_property_"/>
+ <div data-track-${term}="click_link" data-track-label="all_nested_links">
+ <input data-track-${term}="render" data-track-label="label1" value=1 data-track-property="_property_"/>
+ <span data-track-${term}="render" data-track-label="label2" data-track-value=1>
+ <a href="#" id="link">Something</a>
+ </span>
+ <input data-track-${term}="_render_bogus_" data-track-label="label3" value="_value_" data-track-property="_property_"/>
+ </div>
`);
Tracking.trackLoadEvents('_category_'); // only happens once
});
@@ -417,6 +419,35 @@ describe('Tracking', () => {
],
]);
});
+
+ describe.each`
+ event | actionSuffix
+ ${'click'} | ${''}
+ ${'show.bs.dropdown'} | ${'_show'}
+ ${'hide.bs.dropdown'} | ${'_hide'}
+ `(`auto-tracking $event events on nested elements`, ({ event, actionSuffix }) => {
+ let link;
+
+ beforeEach(() => {
+ link = document.querySelector('#link');
+ eventSpy.mockClear();
+ });
+
+ it(`avoids using ancestor [data-track-${term}="render"] tracking configurations`, () => {
+ link.dispatchEvent(new Event(event, { bubbles: true }));
+
+ expect(eventSpy).not.toHaveBeenCalledWith(
+ '_category_',
+ `render${actionSuffix}`,
+ expect.any(Object),
+ );
+ expect(eventSpy).toHaveBeenCalledWith(
+ '_category_',
+ `click_link${actionSuffix}`,
+ expect.objectContaining({ label: 'all_nested_links' }),
+ );
+ });
+ });
});
describe('tracking mixin', () => {
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
index f44f0b98207..a09269e869c 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
@@ -1,6 +1,7 @@
import { shallowMount, mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Header from '~/vue_merge_request_widget/components/mr_widget_header.vue';
+import WebIdeLink from '~/vue_shared/components/web_ide_link.vue';
describe('MRWidgetHeader', () => {
let wrapper;
@@ -35,6 +36,8 @@ describe('MRWidgetHeader', () => {
statusPath: 'abc',
};
+ const findWebIdeButton = () => wrapper.findComponent(WebIdeLink);
+
describe('computed', () => {
describe('shouldShowCommitsBehindText', () => {
it('return true when there are divergedCommitsCount', () => {
@@ -147,73 +150,81 @@ describe('MRWidgetHeader', () => {
statusPath: 'abc',
sourceProjectFullPath: 'root/gitlab-ce',
targetProjectFullPath: 'gitlab-org/gitlab-ce',
+ gitpodEnabled: true,
+ showGitpodButton: true,
+ gitpodUrl: 'http://gitpod.localhost',
};
- beforeEach(() => {
+ it('renders checkout branch button with modal trigger', () => {
createComponent({
mr: { ...mrDefaultOptions },
});
- });
- it('renders checkout branch button with modal trigger', () => {
const button = wrapper.find('.js-check-out-branch');
expect(button.text().trim()).toBe('Check out branch');
});
- it('renders web ide button', async () => {
- const button = wrapper.find('.js-web-ide');
-
- await nextTick();
-
- expect(button.text().trim()).toBe('Open in Web IDE');
- expect(button.classes('disabled')).toBe(false);
- expect(button.attributes('href')).toBe(
- '/-/ide/project/root/gitlab-ce/merge_requests/1?target_project=gitlab-org%2Fgitlab-ce',
- );
- });
-
- it('renders web ide button in disabled state with no href', async () => {
- const mr = { ...mrDefaultOptions, canPushToSourceBranch: false };
- createComponent({ mr });
-
- await nextTick();
-
- const link = wrapper.find('.js-web-ide');
-
- expect(link.attributes('disabled')).toBe('true');
- expect(link.attributes('href')).toBeUndefined();
- });
-
- it('renders web ide button with blank query string if target & source project branch', async () => {
- createComponent({ mr: { ...mrDefaultOptions, targetProjectFullPath: 'root/gitlab-ce' } });
+ it.each([
+ [
+ 'renders web ide button',
+ {
+ mrProps: {},
+ relativeUrl: '',
+ webIdeUrl:
+ '/-/ide/project/root/gitlab-ce/merge_requests/1?target_project=gitlab-org%2Fgitlab-ce',
+ },
+ ],
+ [
+ 'renders web ide button with blank target_project, when mr has same target project',
+ {
+ mrProps: { targetProjectFullPath: 'root/gitlab-ce' },
+ relativeUrl: '',
+ webIdeUrl: '/-/ide/project/root/gitlab-ce/merge_requests/1?target_project=',
+ },
+ ],
+ [
+ 'renders web ide button with relative url',
+ {
+ mrProps: { iid: 2 },
+ relativeUrl: '/gitlab',
+ webIdeUrl:
+ '/gitlab/-/ide/project/root/gitlab-ce/merge_requests/2?target_project=gitlab-org%2Fgitlab-ce',
+ },
+ ],
+ ])('%s', async (_, { mrProps, relativeUrl, webIdeUrl }) => {
+ gon.relative_url_root = relativeUrl;
+ createComponent({
+ mr: { ...mrDefaultOptions, ...mrProps },
+ });
await nextTick();
- const button = wrapper.find('.js-web-ide');
-
- expect(button.text().trim()).toBe('Open in Web IDE');
- expect(button.attributes('href')).toBe(
- '/-/ide/project/root/gitlab-ce/merge_requests/1?target_project=',
- );
+ expect(findWebIdeButton().props()).toMatchObject({
+ showEditButton: false,
+ showWebIdeButton: true,
+ webIdeText: 'Open in Web IDE',
+ gitpodText: 'Open in Gitpod',
+ gitpodEnabled: true,
+ showGitpodButton: true,
+ gitpodUrl: 'http://gitpod.localhost',
+ webIdeUrl,
+ });
});
- it('renders web ide button with relative URL', async () => {
- gon.relative_url_root = '/gitlab';
-
- createComponent({ mr: { ...mrDefaultOptions, iid: 2 } });
+ it('does not render web ide button if source branch is removed', async () => {
+ createComponent({ mr: { ...mrDefaultOptions, sourceBranchRemoved: true } });
await nextTick();
- const button = wrapper.find('.js-web-ide');
-
- expect(button.text().trim()).toBe('Open in Web IDE');
- expect(button.attributes('href')).toBe(
- '/gitlab/-/ide/project/root/gitlab-ce/merge_requests/2?target_project=gitlab-org%2Fgitlab-ce',
- );
+ expect(findWebIdeButton().exists()).toBe(false);
});
it('renders download dropdown with links', () => {
+ createComponent({
+ mr: { ...mrDefaultOptions },
+ });
+
expectDownloadDropdownItems();
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js
index a879b06e858..6ea8ca10c02 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js
@@ -17,7 +17,7 @@ describe('MRWidgetRelatedLinks', () => {
it('returns Closes text for open merge request', () => {
createComponent({ state: 'open', relatedLinks: {} });
- expect(wrapper.vm.closesText).toBe('Closes');
+ expect(wrapper.vm.closesText).toBe('Closes issues');
});
it('returns correct text for closed merge request', () => {
@@ -38,6 +38,7 @@ describe('MRWidgetRelatedLinks', () => {
createComponent({
relatedLinks: {
closing: '<a href="#">#23</a> and <a>#42</a>',
+ closingCount: 2,
},
});
const content = wrapper
@@ -45,7 +46,7 @@ describe('MRWidgetRelatedLinks', () => {
.replace(/\n(\s)+/g, ' ')
.trim();
- expect(content).toContain('Closes #23 and #42');
+ expect(content).toContain('Closes issues #23 and #42');
expect(content).not.toContain('Mentions');
});
@@ -53,11 +54,17 @@ describe('MRWidgetRelatedLinks', () => {
createComponent({
relatedLinks: {
mentioned: '<a href="#">#7</a>',
+ mentionedCount: 1,
},
});
- expect(wrapper.text().trim()).toContain('Mentions #7');
- expect(wrapper.text().trim()).not.toContain('Closes');
+ const content = wrapper
+ .text()
+ .replace(/\n(\s)+/g, ' ')
+ .trim();
+
+ expect(content).toContain('Mentions issue #7');
+ expect(content).not.toContain('Closes issues');
});
it('should have closing and mentioned issues at the same time', () => {
@@ -65,6 +72,8 @@ describe('MRWidgetRelatedLinks', () => {
relatedLinks: {
closing: '<a href="#">#7</a>',
mentioned: '<a href="#">#23</a> and <a>#42</a>',
+ closingCount: 1,
+ mentionedCount: 2,
},
});
const content = wrapper
@@ -72,8 +81,8 @@ describe('MRWidgetRelatedLinks', () => {
.replace(/\n(\s)+/g, ' ')
.trim();
- expect(content).toContain('Closes #7');
- expect(content).toContain('Mentions #23 and #42');
+ expect(content).toContain('Closes issue #7');
+ expect(content).toContain('Mentions issues #23 and #42');
});
it('should have assing issues link', () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
index ac20487c55f..5981d2d7849 100644
--- a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
+++ b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
@@ -4,8 +4,10 @@ exports[`MRWidgetAutoMergeEnabled when graphql is disabled template should have
<div
class="mr-widget-body media"
>
- <status-icon-stub
- status="success"
+ <gl-icon-stub
+ class="gl-text-blue-500 gl-mr-3 gl-mt-1"
+ name="status_scheduled"
+ size="24"
/>
<div
@@ -17,55 +19,31 @@ exports[`MRWidgetAutoMergeEnabled when graphql is disabled template should have
<span
class="gl-mr-3"
>
- <span
- class="js-status-text-before-author"
- data-testid="beforeStatusText"
- >
- Set by
- </span>
-
- <mr-widget-author-stub
- author="[object Object]"
- showauthorname="true"
+ <gl-sprintf-stub
+ data-testid="statusText"
+ message="Set by %{merge_author} to be merged automatically when the pipeline succeeds"
/>
-
- <span
- class="js-status-text-after-author"
- data-testid="afterStatusText"
- >
- to be merged automatically when the pipeline succeeds
- </span>
</span>
- <a
- class="btn btn-sm btn-default js-cancel-auto-merge"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="js-cancel-auto-merge"
data-qa-selector="cancel_auto_merge_button"
data-testid="cancelAutomaticMergeButton"
- href="#"
- role="button"
+ icon=""
+ size="small"
+ variant="default"
>
- <!---->
- Cancel
+ Cancel auto-merge
- </a>
+ </gl-button-stub>
</h4>
<section
class="mr-info-list"
>
- <p>
-
- The changes will be merged into
-
- <a
- class="label-branch"
- href="/foo/bar"
- >
- foo
- </a>
- </p>
-
<p
class="gl-display-flex"
>
@@ -75,17 +53,19 @@ exports[`MRWidgetAutoMergeEnabled when graphql is disabled template should have
The source branch will not be deleted
</span>
- <a
- class="btn btn-sm btn-default js-remove-source-branch"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="js-remove-source-branch"
data-testid="removeSourceBranchButton"
- href="#"
- role="button"
+ icon=""
+ size="small"
+ variant="default"
>
- <!---->
Delete source branch
- </a>
+ </gl-button-stub>
</p>
</section>
</div>
@@ -96,8 +76,10 @@ exports[`MRWidgetAutoMergeEnabled when graphql is enabled template should have c
<div
class="mr-widget-body media"
>
- <status-icon-stub
- status="success"
+ <gl-icon-stub
+ class="gl-text-blue-500 gl-mr-3 gl-mt-1"
+ name="status_scheduled"
+ size="24"
/>
<div
@@ -109,55 +91,31 @@ exports[`MRWidgetAutoMergeEnabled when graphql is enabled template should have c
<span
class="gl-mr-3"
>
- <span
- class="js-status-text-before-author"
- data-testid="beforeStatusText"
- >
- Set by
- </span>
-
- <mr-widget-author-stub
- author="[object Object]"
- showauthorname="true"
+ <gl-sprintf-stub
+ data-testid="statusText"
+ message="Set by %{merge_author} to be merged automatically when the pipeline succeeds"
/>
-
- <span
- class="js-status-text-after-author"
- data-testid="afterStatusText"
- >
- to be merged automatically when the pipeline succeeds
- </span>
</span>
- <a
- class="btn btn-sm btn-default js-cancel-auto-merge"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="js-cancel-auto-merge"
data-qa-selector="cancel_auto_merge_button"
data-testid="cancelAutomaticMergeButton"
- href="#"
- role="button"
+ icon=""
+ size="small"
+ variant="default"
>
- <!---->
- Cancel
+ Cancel auto-merge
- </a>
+ </gl-button-stub>
</h4>
<section
class="mr-info-list"
>
- <p>
-
- The changes will be merged into
-
- <a
- class="label-branch"
- href="/foo/bar"
- >
- foo
- </a>
- </p>
-
<p
class="gl-display-flex"
>
@@ -167,17 +125,19 @@ exports[`MRWidgetAutoMergeEnabled when graphql is enabled template should have c
The source branch will not be deleted
</span>
- <a
- class="btn btn-sm btn-default js-remove-source-branch"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="js-remove-source-branch"
data-testid="removeSourceBranchButton"
- href="#"
- role="button"
+ icon=""
+ size="small"
+ variant="default"
>
- <!---->
Delete source branch
- </a>
+ </gl-button-stub>
</p>
</section>
</div>
diff --git a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_ready_to_merge_spec.js.snap b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_ready_to_merge_spec.js.snap
deleted file mode 100644
index cef1dff3335..00000000000
--- a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_ready_to_merge_spec.js.snap
+++ /dev/null
@@ -1,3 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`ReadyToMerge with a mismatched SHA warns the user to refresh to review 1`] = `"<gl-sprintf-stub message=\\"New changes were added. %{linkStart}Reload the page to review them%{linkEnd}\\"></gl-sprintf-stub>"`;
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
index 0110a76e722..4c1534574f5 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
@@ -72,6 +72,8 @@ const defaultMrProps = () => ({
autoMergeStrategy: MWPS_MERGE_STRATEGY,
});
+const getStatusText = () => wrapper.findByTestId('statusText').attributes('message');
+
describe('MRWidgetAutoMergeEnabled', () => {
let oldWindowGl;
@@ -167,30 +169,6 @@ describe('MRWidgetAutoMergeEnabled', () => {
});
});
- describe('statusTextBeforeAuthor', () => {
- it('should return "Set by" if the MWPS is selected', () => {
- factory({
- ...defaultMrProps(),
- autoMergeStrategy: MWPS_MERGE_STRATEGY,
- });
-
- expect(wrapper.findByTestId('beforeStatusText').text()).toBe('Set by');
- });
- });
-
- describe('statusTextAfterAuthor', () => {
- it('should return "to be merged automatically..." if MWPS is selected', () => {
- factory({
- ...defaultMrProps(),
- autoMergeStrategy: MWPS_MERGE_STRATEGY,
- });
-
- expect(wrapper.findByTestId('afterStatusText').text()).toBe(
- 'to be merged automatically when the pipeline succeeds',
- );
- });
- });
-
describe('cancelButtonText', () => {
it('should return "Cancel" if MWPS is selected', () => {
factory({
@@ -198,7 +176,9 @@ describe('MRWidgetAutoMergeEnabled', () => {
autoMergeStrategy: MWPS_MERGE_STRATEGY,
});
- expect(wrapper.findByTestId('cancelAutomaticMergeButton').text()).toBe('Cancel');
+ expect(wrapper.findByTestId('cancelAutomaticMergeButton').text()).toBe(
+ 'Cancel auto-merge',
+ );
});
});
});
@@ -279,7 +259,7 @@ describe('MRWidgetAutoMergeEnabled', () => {
await nextTick();
- expect(wrapper.find('.js-cancel-auto-merge').attributes('disabled')).toBe('disabled');
+ expect(wrapper.find('.js-cancel-auto-merge').props('loading')).toBe(true);
});
it('should show source branch will be deleted text when it source branch set to remove', () => {
@@ -313,7 +293,7 @@ describe('MRWidgetAutoMergeEnabled', () => {
await nextTick();
- expect(wrapper.find('.js-remove-source-branch').attributes('disabled')).toBe('disabled');
+ expect(wrapper.find('.js-remove-source-branch').props('loading')).toBe(true);
});
it('should render the status text as "...to merged automatically" if MWPS is selected', () => {
@@ -322,9 +302,9 @@ describe('MRWidgetAutoMergeEnabled', () => {
autoMergeStrategy: MWPS_MERGE_STRATEGY,
});
- const statusText = trimText(wrapper.find('.js-status-text-after-author').text());
-
- expect(statusText).toBe('to be merged automatically when the pipeline succeeds');
+ expect(getStatusText()).toBe(
+ 'Set by %{merge_author} to be merged automatically when the pipeline succeeds',
+ );
});
it('should render the cancel button as "Cancel" if MWPS is selected', () => {
@@ -335,7 +315,7 @@ describe('MRWidgetAutoMergeEnabled', () => {
const cancelButtonText = trimText(wrapper.find('.js-cancel-auto-merge').text());
- expect(cancelButtonText).toBe('Cancel');
+ expect(cancelButtonText).toBe('Cancel auto-merge');
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index cd77d442cbf..e41fb815c8d 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -1,4 +1,3 @@
-import { GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import simplePoll from '~/lib/utils/simple_poll';
@@ -782,26 +781,4 @@ describe('ReadyToMerge', () => {
});
});
});
-
- describe('with a mismatched SHA', () => {
- const findMismatchShaBlock = () => wrapper.find('.js-sha-mismatch');
- const findMismatchShaTextBlock = () => findMismatchShaBlock().find(GlSprintf);
-
- beforeEach(() => {
- createComponent({
- mr: {
- isSHAMismatch: true,
- mergeRequestDiffsPath: '/merge_requests/1/diffs',
- },
- });
- });
-
- it('displays a warning message', () => {
- expect(findMismatchShaBlock().exists()).toBe(true);
- });
-
- it('warns the user to refresh to review', () => {
- expect(findMismatchShaTextBlock().element.outerHTML).toMatchSnapshot();
- });
- });
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js
index ef6a9b1e8fc..2a343997cf5 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js
@@ -1,25 +1,42 @@
-import Vue from 'vue';
-import { removeBreakLine } from 'helpers/text_helper';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
import ShaMismatch from '~/vue_merge_request_widget/components/states/sha_mismatch.vue';
+import { I18N_SHA_MISMATCH } from '~/vue_merge_request_widget/i18n';
+
+function createComponent({ path = '' } = {}) {
+ return mount(ShaMismatch, {
+ propsData: {
+ mr: {
+ mergeRequestDiffsPath: path,
+ },
+ },
+ });
+}
describe('ShaMismatch', () => {
- let vm;
+ let wrapper;
+ const findActionButton = () => wrapper.find('[data-testid="action-button"]');
beforeEach(() => {
- const Component = Vue.extend(ShaMismatch);
- vm = mountComponent(Component);
+ wrapper = createComponent();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ });
+
+ it('should render warning message', () => {
+ expect(wrapper.element.innerText).toContain(I18N_SHA_MISMATCH.warningMessage);
});
- it('should render information message', () => {
- expect(vm.$el.querySelector('button').disabled).toEqual(true);
+ it('action button should have correct label', () => {
+ expect(findActionButton().text()).toBe(I18N_SHA_MISMATCH.actionButtonLabel);
+ });
+
+ it('action button should link to the diff path', () => {
+ const DIFF_PATH = '/gitlab-org/gitlab-test/-/merge_requests/6/diffs';
+
+ wrapper = createComponent({ path: DIFF_PATH });
- expect(removeBreakLine(vm.$el.textContent).trim()).toContain(
- 'The source branch HEAD has recently changed. Please reload the page and review the changes before merging',
- );
+ expect(findActionButton().attributes('href')).toBe(DIFF_PATH);
});
});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
index 49783560bf2..31ade17e50a 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
@@ -45,7 +45,6 @@ describe('DeploymentAction component', () => {
propsData: {
computedDeploymentStatus: CREATED,
deployment: deploymentMockData,
- showVisualReviewApp: false,
},
});
});
@@ -64,7 +63,6 @@ describe('DeploymentAction component', () => {
...deploymentMockData,
stop_url: null,
},
- showVisualReviewApp: false,
},
});
});
@@ -115,7 +113,6 @@ describe('DeploymentAction component', () => {
...deploymentMockData,
details: displayConditionChanges,
},
- showVisualReviewApp: false,
},
});
});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_list_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_list_spec.js
index dd0c483b28a..948d7ebab5e 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_list_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_list_spec.js
@@ -7,7 +7,6 @@ import MrCollapsibleExtension from '~/vue_merge_request_widget/components/mr_col
import { mockStore } from '../mock_data';
const DEFAULT_PROPS = {
- showVisualReviewAppLink: false,
hasDeploymentMetrics: false,
deploymentClass: 'js-pre-deployment',
};
@@ -46,7 +45,6 @@ describe('~/vue_merge_request_widget/components/deployment/deployment_list.vue',
([deploymentWrapper, deployment]) => {
expect(deploymentWrapper.props('deployment')).toEqual(deployment);
expect(deploymentWrapper.props()).toMatchObject({
- showVisualReviewApp: DEFAULT_PROPS.showVisualReviewAppLink,
showMetrics: DEFAULT_PROPS.hasDeploymentMetrics,
});
expect(deploymentWrapper.classes(DEFAULT_PROPS.deploymentClass)).toBe(true);
@@ -87,10 +85,6 @@ describe('~/vue_merge_request_widget/components/deployment/deployment_list.vue',
zip(deploymentWrappers.wrappers, propsData.deployments).forEach(
([deploymentWrapper, deployment]) => {
expect(deploymentWrapper.props('deployment')).toEqual(deployment);
- expect(deploymentWrapper.props()).toMatchObject({
- showVisualReviewApp: DEFAULT_PROPS.showVisualReviewAppLink,
- showMetrics: DEFAULT_PROPS.hasDeploymentMetrics,
- });
expect(deploymentWrapper.classes(DEFAULT_PROPS.deploymentClass)).toBe(true);
expect(deploymentWrapper.text()).toEqual(expect.any(String));
expect(deploymentWrapper.text()).not.toBe('');
diff --git a/spec/frontend/vue_mr_widget/mock_data.js b/spec/frontend/vue_mr_widget/mock_data.js
index e6f1e15d718..f356f6fb5bf 100644
--- a/spec/frontend/vue_mr_widget/mock_data.js
+++ b/spec/frontend/vue_mr_widget/mock_data.js
@@ -234,14 +234,11 @@ export default {
can_revert_on_current_merge_request: true,
can_cherry_pick_on_current_merge_request: true,
},
- codeclimate: {
- head_path: 'head.json',
- base_path: 'base.json',
- },
blob_path: {
base_path: 'blob_path',
head_path: 'blob_path',
},
+ codequality_reports_path: 'codequality_reports.json',
codequality_help_path: 'code_quality.html',
target_branch_path: '/root/acets-app/branches/main',
source_branch_path: '/root/acets-app/branches/daaaa',
@@ -284,6 +281,9 @@ export default {
security_reports_docs_path: 'security-reports-docs-path',
sast_comparison_path: '/sast_comparison_path',
secret_scanning_comparison_path: '/secret_scanning_comparison_path',
+ gitpod_enabled: true,
+ show_gitpod_button: true,
+ gitpod_url: 'http://gitpod.localhost',
};
export const mockStore = {
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index 9da370747fc..c50cf7cb076 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -12,7 +12,7 @@ import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/consta
import eventHub from '~/vue_merge_request_widget/event_hub';
import MrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
-import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/queries/security_report_merge_request_download_paths.query.graphql';
+import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/graphql/queries/security_report_merge_request_download_paths.query.graphql';
import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
import mockData from './mock_data';
@@ -80,14 +80,15 @@ describe('MrWidgetOptions', () => {
describe('computed', () => {
describe('componentName', () => {
- it('should return merged component', () => {
- expect(wrapper.vm.componentName).toEqual('mr-widget-merged');
- });
-
- it('should return conflicts component', () => {
- wrapper.vm.mr.state = 'conflicts';
-
- expect(wrapper.vm.componentName).toEqual('mr-widget-conflicts');
+ it.each`
+ state | componentName
+ ${'merged'} | ${'mr-widget-merged'}
+ ${'conflicts'} | ${'mr-widget-conflicts'}
+ ${'shaMismatch'} | ${'sha-mismatch'}
+ `('should translate $state into $componentName', ({ state, componentName }) => {
+ wrapper.vm.mr.state = state;
+
+ expect(wrapper.vm.componentName).toEqual(componentName);
});
});
diff --git a/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js b/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
index cfc846075ea..bf0179aa425 100644
--- a/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
+++ b/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
@@ -10,6 +10,14 @@ describe('MergeRequestStore', () => {
store = new MergeRequestStore(mockData);
});
+ it('should initialize gitpod attributes', () => {
+ expect(store).toMatchObject({
+ gitpodEnabled: mockData.gitpod_enabled,
+ showGitpodButton: mockData.show_gitpod_button,
+ gitpodUrl: mockData.gitpod_url,
+ });
+ });
+
describe('setData', () => {
it('should set isSHAMismatch when the diff SHA changes', () => {
store.setData({ ...mockData, diff_head_sha: 'a-different-string' });
diff --git a/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js b/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js
new file mode 100644
index 00000000000..016fe1f131e
--- /dev/null
+++ b/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js
@@ -0,0 +1,97 @@
+import { GlDropdown, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+
+import DropdownWidget from '~/vue_shared/components/dropdown/dropdown_widget/dropdown_widget.vue';
+
+describe('DropdownWidget component', () => {
+ let wrapper;
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findSearch = () => wrapper.findComponent(GlSearchBoxByType);
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(DropdownWidget, {
+ propsData: {
+ options: [
+ {
+ id: '1',
+ title: 'Option 1',
+ },
+ {
+ id: '2',
+ title: 'Option 2',
+ },
+ ],
+ ...props,
+ },
+ stubs: {
+ GlDropdown,
+ },
+ });
+
+ // We need to mock out `showDropdown` which
+ // invokes `show` method of BDropdown used inside GlDropdown.
+ // Context: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/54895#note_524281679
+ jest.spyOn(wrapper.vm, 'showDropdown').mockImplementation();
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('passes default selectText prop to dropdown', () => {
+ expect(findDropdown().props('text')).toBe('Select');
+ });
+
+ describe('when dropdown is open', () => {
+ beforeEach(async () => {
+ findDropdown().vm.$emit('show');
+ await wrapper.vm.$nextTick();
+ });
+
+ it('emits search event when typing in search box', () => {
+ const searchTerm = 'searchTerm';
+ findSearch().vm.$emit('input', searchTerm);
+
+ expect(wrapper.emitted('set-search')).toEqual([[searchTerm]]);
+ });
+
+ it('renders one selectable item per passed option', async () => {
+ expect(findDropdownItems()).toHaveLength(2);
+ });
+
+ it('emits set-option event when clicking on an option', async () => {
+ wrapper
+ .findAll('[data-testid="unselected-option"]')
+ .at(1)
+ .vm.$emit('click', new Event('click'));
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.emitted('set-option')).toEqual([[wrapper.props().options[1]]]);
+ });
+ });
+
+ describe('when options are users', () => {
+ const mockUser = {
+ id: 1,
+ name: 'User name',
+ username: 'username',
+ avatarUrl: 'foo/bar',
+ };
+
+ beforeEach(() => {
+ createComponent({ props: { options: [mockUser] } });
+ });
+
+ it('passes user related props to dropdown item', () => {
+ expect(findDropdownItems().at(0).props('avatarUrl')).toBe(mockUser.avatarUrl);
+ expect(findDropdownItems().at(0).props('secondaryText')).toBe(mockUser.username);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
index 9fa9d35e3e2..8e931aebfe0 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
@@ -32,6 +32,9 @@ jest.mock('~/vue_shared/components/filtered_search_bar/filtered_search_utils', (
stripQuotes: jest.requireActual(
'~/vue_shared/components/filtered_search_bar/filtered_search_utils',
).stripQuotes,
+ filterEmptySearchTerm: jest.requireActual(
+ '~/vue_shared/components/filtered_search_bar/filtered_search_utils',
+ ).filterEmptySearchTerm,
}));
const createComponent = ({
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
index 74f579e77ed..d3e1bfef561 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
@@ -86,7 +86,7 @@ describe('AuthorToken', () => {
});
describe('methods', () => {
- describe('fetchAuthorBySearchTerm', () => {
+ describe('fetchAuthors', () => {
beforeEach(() => {
wrapper = createComponent();
});
@@ -155,7 +155,7 @@ describe('AuthorToken', () => {
expect(baseTokenEl.exists()).toBe(true);
expect(baseTokenEl.props()).toMatchObject({
suggestions: mockAuthors,
- fnActiveTokenValue: wrapper.vm.getActiveAuthor,
+ getActiveTokenValue: wrapper.vm.getActiveAuthor,
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
index cd6ffd679d0..eb1dbed52cc 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -5,7 +5,7 @@ import {
mockLabels,
} from 'jest/vue_shared/components/sidebar/labels_select_vue/mock_data';
-import { DEFAULT_LABELS } from '~/vue_shared/components/filtered_search_bar/constants';
+import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
import {
getRecentlyUsedSuggestions,
setTokenValueToRecentlyUsed,
@@ -51,9 +51,8 @@ const mockProps = {
active: false,
suggestions: [],
suggestionsLoading: false,
- defaultSuggestions: DEFAULT_LABELS,
+ defaultSuggestions: DEFAULT_NONE_ANY,
recentSuggestionsStorageKey: mockStorageKey,
- fnCurrentTokenValue: jest.fn(),
};
function createComponent({
@@ -99,31 +98,20 @@ describe('BaseToken', () => {
});
describe('computed', () => {
- describe('currentTokenValue', () => {
- it('calls `fnCurrentTokenValue` when it is provided', () => {
- // We're disabling lint to trigger computed prop execution for this test.
- // eslint-disable-next-line no-unused-vars
- const { currentTokenValue } = wrapper.vm;
-
- expect(wrapper.vm.fnCurrentTokenValue).toHaveBeenCalledWith(`"${mockRegularLabel.title}"`);
- });
- });
-
describe('activeTokenValue', () => {
- it('calls `fnActiveTokenValue` when it is provided', async () => {
- const mockFnActiveTokenValue = jest.fn();
+ it('calls `getActiveTokenValue` when it is provided', async () => {
+ const mockGetActiveTokenValue = jest.fn();
wrapper.setProps({
- fnActiveTokenValue: mockFnActiveTokenValue,
- fnCurrentTokenValue: undefined,
+ getActiveTokenValue: mockGetActiveTokenValue,
});
await wrapper.vm.$nextTick();
- expect(mockFnActiveTokenValue).toHaveBeenCalledTimes(1);
- expect(mockFnActiveTokenValue).toHaveBeenCalledWith(
+ expect(mockGetActiveTokenValue).toHaveBeenCalledTimes(1);
+ expect(mockGetActiveTokenValue).toHaveBeenCalledWith(
mockLabels,
- `"${mockRegularLabel.title.toLowerCase()}"`,
+ `"${mockRegularLabel.title}"`,
);
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
index 331c9c2c14d..09eac636cae 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
@@ -61,40 +61,16 @@ describe('BranchToken', () => {
wrapper.destroy();
});
- describe('computed', () => {
- beforeEach(async () => {
- wrapper = createComponent({ value: { data: mockBranches[0].name } });
-
- wrapper.setData({
- branches: mockBranches,
- });
-
- await wrapper.vm.$nextTick();
- });
-
- describe('currentValue', () => {
- it('returns lowercase string for `value.data`', () => {
- expect(wrapper.vm.currentValue).toBe('main');
- });
- });
-
- describe('activeBranch', () => {
- it('returns object for currently present `value.data`', () => {
- expect(wrapper.vm.activeBranch).toEqual(mockBranches[0]);
- });
- });
- });
-
describe('methods', () => {
beforeEach(() => {
wrapper = createComponent();
});
- describe('fetchBranchBySearchTerm', () => {
+ describe('fetchBranches', () => {
it('calls `config.fetchBranches` with provided searchTerm param', () => {
jest.spyOn(wrapper.vm.config, 'fetchBranches');
- wrapper.vm.fetchBranchBySearchTerm('foo');
+ wrapper.vm.fetchBranches('foo');
expect(wrapper.vm.config.fetchBranches).toHaveBeenCalledWith('foo');
});
@@ -102,7 +78,7 @@ describe('BranchToken', () => {
it('sets response to `branches` when request is succesful', () => {
jest.spyOn(wrapper.vm.config, 'fetchBranches').mockResolvedValue({ data: mockBranches });
- wrapper.vm.fetchBranchBySearchTerm('foo');
+ wrapper.vm.fetchBranches('foo');
return waitForPromises().then(() => {
expect(wrapper.vm.branches).toEqual(mockBranches);
@@ -112,7 +88,7 @@ describe('BranchToken', () => {
it('calls `createFlash` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchBranches').mockRejectedValue({});
- wrapper.vm.fetchBranchBySearchTerm('foo');
+ wrapper.vm.fetchBranches('foo');
return waitForPromises().then(() => {
expect(createFlash).toHaveBeenCalledWith({
@@ -124,7 +100,7 @@ describe('BranchToken', () => {
it('sets `loading` to false when request completes', () => {
jest.spyOn(wrapper.vm.config, 'fetchBranches').mockRejectedValue({});
- wrapper.vm.fetchBranchBySearchTerm('foo');
+ wrapper.vm.fetchBranches('foo');
return waitForPromises().then(() => {
expect(wrapper.vm.loading).toBe(false);
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
index 778a214f97e..c2d61fd9f05 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
@@ -67,40 +67,16 @@ describe('EmojiToken', () => {
wrapper.destroy();
});
- describe('computed', () => {
- beforeEach(async () => {
- wrapper = createComponent({ value: { data: mockEmojis[0].name } });
-
- wrapper.setData({
- emojis: mockEmojis,
- });
-
- await wrapper.vm.$nextTick();
- });
-
- describe('currentValue', () => {
- it('returns lowercase string for `value.data`', () => {
- expect(wrapper.vm.currentValue).toBe(mockEmojis[0].name);
- });
- });
-
- describe('activeEmoji', () => {
- it('returns object for currently present `value.data`', () => {
- expect(wrapper.vm.activeEmoji).toEqual(mockEmojis[0]);
- });
- });
- });
-
describe('methods', () => {
beforeEach(() => {
wrapper = createComponent();
});
- describe('fetchEmojiBySearchTerm', () => {
+ describe('fetchEmojis', () => {
it('calls `config.fetchEmojis` with provided searchTerm param', () => {
jest.spyOn(wrapper.vm.config, 'fetchEmojis');
- wrapper.vm.fetchEmojiBySearchTerm('foo');
+ wrapper.vm.fetchEmojis('foo');
expect(wrapper.vm.config.fetchEmojis).toHaveBeenCalledWith('foo');
});
@@ -108,7 +84,7 @@ describe('EmojiToken', () => {
it('sets response to `emojis` when request is successful', () => {
jest.spyOn(wrapper.vm.config, 'fetchEmojis').mockResolvedValue(mockEmojis);
- wrapper.vm.fetchEmojiBySearchTerm('foo');
+ wrapper.vm.fetchEmojis('foo');
return waitForPromises().then(() => {
expect(wrapper.vm.emojis).toEqual(mockEmojis);
@@ -118,7 +94,7 @@ describe('EmojiToken', () => {
it('calls `createFlash` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchEmojis').mockRejectedValue({});
- wrapper.vm.fetchEmojiBySearchTerm('foo');
+ wrapper.vm.fetchEmojis('foo');
return waitForPromises().then(() => {
expect(createFlash).toHaveBeenCalledWith({
@@ -130,7 +106,7 @@ describe('EmojiToken', () => {
it('sets `loading` to false when request completes', () => {
jest.spyOn(wrapper.vm.config, 'fetchEmojis').mockRejectedValue({});
- wrapper.vm.fetchEmojiBySearchTerm('foo');
+ wrapper.vm.fetchEmojis('foo');
return waitForPromises().then(() => {
expect(wrapper.vm.loading).toBe(false);
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
index bd654c5a9cb..a609aaa1c4e 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
@@ -1,5 +1,6 @@
import { GlFilteredSearchToken, GlFilteredSearchTokenSegment } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import IterationToken from '~/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue';
import { mockIterationToken } from '../mock_data';
@@ -13,6 +14,7 @@ describe('IterationToken', () => {
const createComponent = ({ config = mockIterationToken, value = { data: '' } } = {}) =>
mount(IterationToken, {
propsData: {
+ active: false,
config,
value,
},
@@ -69,7 +71,7 @@ describe('IterationToken', () => {
config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy },
});
- await wrapper.vm.$nextTick();
+ await waitForPromises();
expect(createFlash).toHaveBeenCalledWith({
message: 'There was a problem fetching iterations.',
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
index ec9458f64d2..a348344b9dd 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
@@ -13,10 +13,7 @@ import {
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import {
- DEFAULT_LABELS,
- DEFAULT_NONE_ANY,
-} from '~/vue_shared/components/filtered_search_bar/constants';
+import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
@@ -98,11 +95,11 @@ describe('LabelToken', () => {
});
});
- describe('fetchLabelBySearchTerm', () => {
+ describe('fetchLabels', () => {
it('calls `config.fetchLabels` with provided searchTerm param', () => {
jest.spyOn(wrapper.vm.config, 'fetchLabels');
- wrapper.vm.fetchLabelBySearchTerm('foo');
+ wrapper.vm.fetchLabels('foo');
expect(wrapper.vm.config.fetchLabels).toHaveBeenCalledWith('foo');
});
@@ -110,7 +107,7 @@ describe('LabelToken', () => {
it('sets response to `labels` when request is succesful', () => {
jest.spyOn(wrapper.vm.config, 'fetchLabels').mockResolvedValue(mockLabels);
- wrapper.vm.fetchLabelBySearchTerm('foo');
+ wrapper.vm.fetchLabels('foo');
return waitForPromises().then(() => {
expect(wrapper.vm.labels).toEqual(mockLabels);
@@ -120,7 +117,7 @@ describe('LabelToken', () => {
it('calls `createFlash` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchLabels').mockRejectedValue({});
- wrapper.vm.fetchLabelBySearchTerm('foo');
+ wrapper.vm.fetchLabels('foo');
return waitForPromises().then(() => {
expect(createFlash).toHaveBeenCalledWith({
@@ -132,7 +129,7 @@ describe('LabelToken', () => {
it('sets `loading` to false when request completes', () => {
jest.spyOn(wrapper.vm.config, 'fetchLabels').mockRejectedValue({});
- wrapper.vm.fetchLabelBySearchTerm('foo');
+ wrapper.vm.fetchLabels('foo');
return waitForPromises().then(() => {
expect(wrapper.vm.loading).toBe(false);
@@ -160,7 +157,7 @@ describe('LabelToken', () => {
expect(baseTokenEl.exists()).toBe(true);
expect(baseTokenEl.props()).toMatchObject({
suggestions: mockLabels,
- fnActiveTokenValue: wrapper.vm.getActiveLabel,
+ getActiveTokenValue: wrapper.vm.getActiveLabel,
});
});
@@ -208,7 +205,7 @@ describe('LabelToken', () => {
expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
});
- it('renders `DEFAULT_LABELS` as default suggestions', () => {
+ it('renders `DEFAULT_NONE_ANY` as default suggestions', () => {
wrapper = createComponent({
active: true,
config: { ...mockLabelToken },
@@ -220,8 +217,8 @@ describe('LabelToken', () => {
const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
- expect(suggestions).toHaveLength(DEFAULT_LABELS.length);
- DEFAULT_LABELS.forEach((label, index) => {
+ expect(suggestions).toHaveLength(DEFAULT_NONE_ANY.length);
+ DEFAULT_NONE_ANY.forEach((label, index) => {
expect(suggestions.at(index).text()).toBe(label.text);
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
index 74ceb03bb96..529844817d3 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
@@ -14,12 +14,7 @@ import { sortMilestonesByDueDate } from '~/milestones/milestone_utils';
import { DEFAULT_MILESTONES } from '~/vue_shared/components/filtered_search_bar/constants';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
-import {
- mockMilestoneToken,
- mockMilestones,
- mockRegularMilestone,
- mockEscapedMilestone,
-} from '../mock_data';
+import { mockMilestoneToken, mockMilestones, mockRegularMilestone } from '../mock_data';
jest.mock('~/flash');
jest.mock('~/milestones/milestone_utils');
@@ -70,37 +65,12 @@ describe('MilestoneToken', () => {
wrapper.destroy();
});
- describe('computed', () => {
- beforeEach(async () => {
- // Milestone title with spaces is always enclosed in quotations by component.
- wrapper = createComponent({ value: { data: `"${mockEscapedMilestone.title}"` } });
-
- wrapper.setData({
- milestones: mockMilestones,
- });
-
- await wrapper.vm.$nextTick();
- });
-
- describe('currentValue', () => {
- it('returns lowercase string for `value.data`', () => {
- expect(wrapper.vm.currentValue).toBe('"5.0 rc1"');
- });
- });
-
- describe('activeMilestone', () => {
- it('returns object for currently present `value.data`', () => {
- expect(wrapper.vm.activeMilestone).toEqual(mockEscapedMilestone);
- });
- });
- });
-
describe('methods', () => {
- describe('fetchMilestoneBySearchTerm', () => {
+ describe('fetchMilestones', () => {
it('calls `config.fetchMilestones` with provided searchTerm param', () => {
jest.spyOn(wrapper.vm.config, 'fetchMilestones');
- wrapper.vm.fetchMilestoneBySearchTerm('foo');
+ wrapper.vm.fetchMilestones('foo');
expect(wrapper.vm.config.fetchMilestones).toHaveBeenCalledWith('foo');
});
@@ -110,7 +80,7 @@ describe('MilestoneToken', () => {
data: mockMilestones,
});
- wrapper.vm.fetchMilestoneBySearchTerm();
+ wrapper.vm.fetchMilestones();
return waitForPromises().then(() => {
expect(wrapper.vm.milestones).toEqual(mockMilestones);
@@ -121,7 +91,7 @@ describe('MilestoneToken', () => {
it('calls `createFlash` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchMilestones').mockRejectedValue({});
- wrapper.vm.fetchMilestoneBySearchTerm('foo');
+ wrapper.vm.fetchMilestones('foo');
return waitForPromises().then(() => {
expect(createFlash).toHaveBeenCalledWith({
@@ -133,7 +103,7 @@ describe('MilestoneToken', () => {
it('sets `loading` to false when request completes', () => {
jest.spyOn(wrapper.vm.config, 'fetchMilestones').mockRejectedValue({});
- wrapper.vm.fetchMilestoneBySearchTerm('foo');
+ wrapper.vm.fetchMilestones('foo');
return waitForPromises().then(() => {
expect(wrapper.vm.loading).toBe(false);
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js
index 9a72be636cd..e788c742736 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js
@@ -12,6 +12,7 @@ describe('WeightToken', () => {
const createComponent = ({ config = mockWeightToken, value = { data: '' } } = {}) =>
mount(WeightToken, {
propsData: {
+ active: false,
config,
value,
},
diff --git a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js b/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
index 8738924f717..6ab828efebe 100644
--- a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
+++ b/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
@@ -144,23 +144,6 @@ describe('RelatedIssuableItem', () => {
expect(wrapper.find(IssueDueDate).props('closed')).toBe(true);
});
-
- it('should not contain the `.text-danger` css class for overdue issue that is closed', async () => {
- mountComponent({
- props: {
- ...props,
- closedAt: '2018-12-01T00:00:00.00Z',
- },
- });
- await wrapper.vm.$nextTick();
-
- expect(wrapper.find(IssueDueDate).find('.board-card-info-icon').classes('text-danger')).toBe(
- false,
- );
- expect(wrapper.find(IssueDueDate).find('.board-card-info-text').classes('text-danger')).toBe(
- false,
- );
- });
});
describe('token assignees', () => {
diff --git a/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js b/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
index 786dfabb990..19e4f2d8c92 100644
--- a/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
@@ -1,3 +1,4 @@
+import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import ToolbarButton from '~/vue_shared/components/markdown/toolbar_button.vue';
@@ -25,7 +26,7 @@ describe('toolbar_button', () => {
});
const getButtonShortcutsAttr = () => {
- return wrapper.find('button').attributes('data-md-shortcuts');
+ return wrapper.find(GlButton).attributes('data-md-shortcuts');
};
describe('keyboard shortcuts', () => {
diff --git a/spec/frontend/vue_shared/components/papa_parse_alert_spec.js b/spec/frontend/vue_shared/components/papa_parse_alert_spec.js
new file mode 100644
index 00000000000..9be2de17d01
--- /dev/null
+++ b/spec/frontend/vue_shared/components/papa_parse_alert_spec.js
@@ -0,0 +1,44 @@
+import { GlAlert } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import PapaParseAlert from '~/vue_shared/components/papa_parse_alert.vue';
+
+describe('app/assets/javascripts/vue_shared/components/papa_parse_alert.vue', () => {
+ let wrapper;
+
+ const createComponent = ({ errorMessages } = {}) => {
+ wrapper = shallowMount(PapaParseAlert, {
+ propsData: {
+ papaParseErrors: errorMessages,
+ },
+ });
+ };
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render alert with correct props', async () => {
+ createComponent({ errorMessages: [{ code: 'MissingQuotes' }] });
+ await nextTick;
+
+ expect(findAlert().props()).toMatchObject({
+ variant: 'danger',
+ });
+ expect(findAlert().text()).toContain(
+ 'Failed to render the CSV file for the following reasons:',
+ );
+ expect(findAlert().text()).toContain('Quoted field unterminated');
+ });
+
+ it('should render original message if no translation available', async () => {
+ createComponent({
+ errorMessages: [{ code: 'NotDefined', message: 'Error code is undefined' }],
+ });
+ await nextTick;
+
+ expect(findAlert().text()).toContain('Error code is undefined');
+ });
+});
diff --git a/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js b/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
index 395c74dcba6..71ebe561def 100644
--- a/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
+++ b/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
@@ -13,7 +13,7 @@ import {
REPORT_TYPE_SAST,
REPORT_TYPE_SECRET_DETECTION,
} from '~/vue_shared/security_reports/constants';
-import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/queries/security_report_merge_request_download_paths.query.graphql';
+import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/graphql/queries/security_report_merge_request_download_paths.query.graphql';
jest.mock('~/flash');
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
index 06ea88c09a0..a1942e59571 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
@@ -116,6 +116,8 @@ describe('DropdownContentsLabelsView', () => {
});
describe('methods', () => {
+ const fakePreventDefault = jest.fn();
+
describe('isLabelSelected', () => {
it('returns true when provided `label` param is one of the selected labels', () => {
expect(wrapper.vm.isLabelSelected(mockRegularLabel)).toBe(true);
@@ -191,9 +193,11 @@ describe('DropdownContentsLabelsView', () => {
wrapper.vm.handleKeyDown({
keyCode: ENTER_KEY_CODE,
+ preventDefault: fakePreventDefault,
});
expect(wrapper.vm.searchKey).toBe('');
+ expect(fakePreventDefault).toHaveBeenCalled();
});
it('calls action `updateSelectedLabels` with currently highlighted label when Enter key is pressed', () => {
@@ -204,6 +208,7 @@ describe('DropdownContentsLabelsView', () => {
wrapper.vm.handleKeyDown({
keyCode: ENTER_KEY_CODE,
+ preventDefault: fakePreventDefault,
});
expect(wrapper.vm.updateSelectedLabels).toHaveBeenCalledWith([
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
index be849789667..bc1ec8b812b 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
@@ -238,4 +238,14 @@ describe('LabelsSelectRoot', () => {
expect(store.dispatch).not.toHaveBeenCalled();
});
+
+ it('calls updateLabelsSetState after selected labels were updated', async () => {
+ createComponent();
+
+ jest.spyOn(store, 'dispatch').mockResolvedValue();
+ await wrapper.setProps({ selectedLabels: [] });
+ jest.advanceTimersByTime(100);
+
+ expect(store.dispatch).toHaveBeenCalledWith('updateLabelsSetState');
+ });
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
index 46ade5d5857..2e4c056df61 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
@@ -214,7 +214,7 @@ describe('LabelsSelect Actions', () => {
});
describe('on success', () => {
- it('dispatches `requestCreateLabel`, `receiveCreateLabelSuccess` & `toggleDropdownContentsCreateView` actions', (done) => {
+ it('dispatches `requestCreateLabel`, `fetchLabels` & `receiveCreateLabelSuccess` & `toggleDropdownContentsCreateView` actions', (done) => {
const label = { id: 1 };
mock.onPost(/labels.json/).replyOnce(200, label);
@@ -225,6 +225,7 @@ describe('LabelsSelect Actions', () => {
[],
[
{ type: 'requestCreateLabel' },
+ { payload: { refetch: true }, type: 'fetchLabels' },
{ type: 'receiveCreateLabelSuccess' },
{ type: 'toggleDropdownContentsCreateView' },
],
@@ -263,4 +264,16 @@ describe('LabelsSelect Actions', () => {
);
});
});
+
+ describe('updateLabelsSetState', () => {
+ it('updates labels `set` state to match `selectedLabels`', () => {
+ testAction(
+ actions.updateLabelsSetState,
+ {},
+ state,
+ [{ type: types.UPDATE_LABELS_SET_STATE }],
+ [],
+ );
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
index 1d2a9c34599..14e0c8a2278 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
@@ -197,4 +197,26 @@ describe('LabelsSelect Mutations', () => {
});
});
});
+
+ describe(`${types.UPDATE_LABELS_SET_STATE}`, () => {
+ it('updates labels `set` state to match selected labels', () => {
+ const state = {
+ labels: [
+ { id: 1, title: 'scoped::test', set: false },
+ { id: 2, set: true, title: 'scoped::one', touched: true },
+ { id: 3, title: '' },
+ { id: 4, title: '' },
+ ],
+ selectedLabels: [{ id: 1 }, { id: 3 }],
+ };
+ mutations[types.UPDATE_LABELS_SET_STATE](state);
+
+ expect(state.labels).toEqual([
+ { id: 1, title: 'scoped::test', set: true },
+ { id: 2, set: false, title: 'scoped::one', touched: true },
+ { id: 3, title: '', set: true },
+ { id: 4, title: '', set: false },
+ ]);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js
index 46a11bc28d8..90bc1980ac3 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js
@@ -1,6 +1,6 @@
import { GlLoadingIcon, GlLink } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
+import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -14,7 +14,7 @@ jest.mock('~/flash');
const colors = Object.keys(mockSuggestedColors);
const localVue = createLocalVue();
-Vue.use(VueApollo);
+localVue.use(VueApollo);
const userRecoverableError = {
...createLabelSuccessfulResponse,
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js
index 51301387c99..8bd944a3d54 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js
@@ -1,357 +1,213 @@
-import { GlIntersectionObserver, GlLoadingIcon, GlSearchBoxByType, GlLink } from '@gitlab/ui';
+import { GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import { UP_KEY_CODE, DOWN_KEY_CODE, ENTER_KEY_CODE, ESC_KEY_CODE } from '~/lib/utils/keycodes';
+import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
+import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_widget/constants';
import DropdownContentsLabelsView from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view.vue';
+import projectLabelsQuery from '~/vue_shared/components/sidebar/labels_select_widget/graphql/project_labels.query.graphql';
import LabelItem from '~/vue_shared/components/sidebar/labels_select_widget/label_item.vue';
+import { mockConfig, labelsQueryResponse } from './mock_data';
-import * as actions from '~/vue_shared/components/sidebar/labels_select_widget/store/actions';
-import * as getters from '~/vue_shared/components/sidebar/labels_select_widget/store/getters';
-import mutations from '~/vue_shared/components/sidebar/labels_select_widget/store/mutations';
-import defaultState from '~/vue_shared/components/sidebar/labels_select_widget/store/state';
-
-import { mockConfig, mockLabels, mockRegularLabel } from './mock_data';
+jest.mock('~/flash');
const localVue = createLocalVue();
-localVue.use(Vuex);
+localVue.use(VueApollo);
+
+const selectedLabels = [
+ {
+ id: 28,
+ title: 'Bug',
+ description: 'Label for bugs',
+ color: '#FF0000',
+ textColor: '#FFFFFF',
+ },
+];
describe('DropdownContentsLabelsView', () => {
let wrapper;
- const createComponent = (initialState = mockConfig) => {
- const store = new Vuex.Store({
- getters,
- mutations,
- state: {
- ...defaultState(),
- footerCreateLabelTitle: 'Create label',
- footerManageLabelTitle: 'Manage labels',
- },
- actions: {
- ...actions,
- fetchLabels: jest.fn(),
- },
- });
+ const successfulQueryHandler = jest.fn().mockResolvedValue(labelsQueryResponse);
- store.dispatch('setInitialState', initialState);
- store.dispatch('receiveLabelsSuccess', mockLabels);
+ const createComponent = ({
+ initialState = mockConfig,
+ queryHandler = successfulQueryHandler,
+ injected = {},
+ } = {}) => {
+ const mockApollo = createMockApollo([[projectLabelsQuery, queryHandler]]);
wrapper = shallowMount(DropdownContentsLabelsView, {
localVue,
- store,
+ apolloProvider: mockApollo,
+ provide: {
+ projectPath: 'test',
+ iid: 1,
+ allowLabelCreate: true,
+ labelsManagePath: '/gitlab-org/my-project/-/labels',
+ variant: DropdownVariant.Sidebar,
+ ...injected,
+ },
+ propsData: {
+ ...initialState,
+ selectedLabels,
+ },
+ stubs: {
+ GlSearchBoxByType,
+ },
});
};
- beforeEach(() => {
- createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
- const findDropdownContent = () => wrapper.find('[data-testid="dropdown-content"]');
+ const findSearchInput = () => wrapper.findComponent(GlSearchBoxByType);
+ const findLabels = () => wrapper.findAllComponents(LabelItem);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ const findLabelsList = () => wrapper.find('[data-testid="labels-list"]');
+ const findDropdownWrapper = () => wrapper.find('[data-testid="dropdown-wrapper"]');
const findDropdownFooter = () => wrapper.find('[data-testid="dropdown-footer"]');
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
-
- describe('computed', () => {
- describe('visibleLabels', () => {
- it('returns matching labels filtered with `searchKey`', () => {
- wrapper.setData({
- searchKey: 'bug',
- });
-
- expect(wrapper.vm.visibleLabels.length).toBe(1);
- expect(wrapper.vm.visibleLabels[0].title).toBe('Bug');
- });
-
- it('returns matching labels with fuzzy filtering', () => {
- wrapper.setData({
- searchKey: 'bg',
- });
-
- expect(wrapper.vm.visibleLabels.length).toBe(2);
- expect(wrapper.vm.visibleLabels[0].title).toBe('Bug');
- expect(wrapper.vm.visibleLabels[1].title).toBe('Boog');
- });
-
- it('returns all labels when `searchKey` is empty', () => {
- wrapper.setData({
- searchKey: '',
- });
-
- expect(wrapper.vm.visibleLabels.length).toBe(mockLabels.length);
- });
- });
+ const findNoResultsMessage = () => wrapper.find('[data-testid="no-results"]');
+ const findCreateLabelButton = () => wrapper.find('[data-testid="create-label-button"]');
- describe('showNoMatchingResultsMessage', () => {
- it.each`
- searchKey | labels | labelsDescription | returnValue
- ${''} | ${[]} | ${'empty'} | ${false}
- ${'bug'} | ${[]} | ${'empty'} | ${true}
- ${''} | ${mockLabels} | ${'not empty'} | ${false}
- ${'bug'} | ${mockLabels} | ${'not empty'} | ${false}
- `(
- 'returns $returnValue when searchKey is "$searchKey" and visibleLabels is $labelsDescription',
- async ({ searchKey, labels, returnValue }) => {
- wrapper.setData({
- searchKey,
- });
-
- wrapper.vm.$store.dispatch('receiveLabelsSuccess', labels);
-
- await wrapper.vm.$nextTick();
-
- expect(wrapper.vm.showNoMatchingResultsMessage).toBe(returnValue);
- },
- );
+ describe('when loading labels', () => {
+ it('renders disabled search input field', async () => {
+ createComponent();
+ expect(findSearchInput().props('disabled')).toBe(true);
});
- });
-
- describe('methods', () => {
- describe('isLabelSelected', () => {
- it('returns true when provided `label` param is one of the selected labels', () => {
- expect(wrapper.vm.isLabelSelected(mockRegularLabel)).toBe(true);
- });
- it('returns false when provided `label` param is not one of the selected labels', () => {
- expect(wrapper.vm.isLabelSelected(mockLabels[2])).toBe(false);
- });
+ it('renders loading icon', async () => {
+ createComponent();
+ expect(findLoadingIcon().exists()).toBe(true);
});
- describe('handleComponentAppear', () => {
- it('calls `focusInput` on searchInput field', async () => {
- wrapper.vm.$refs.searchInput.focusInput = jest.fn();
-
- await wrapper.vm.handleComponentAppear();
-
- expect(wrapper.vm.$refs.searchInput.focusInput).toHaveBeenCalled();
- });
+ it('does not render labels list', async () => {
+ createComponent();
+ expect(findLabelsList().exists()).toBe(false);
});
+ });
- describe('handleComponentDisappear', () => {
- it('calls action `receiveLabelsSuccess` with empty array', () => {
- jest.spyOn(wrapper.vm, 'receiveLabelsSuccess');
-
- wrapper.vm.handleComponentDisappear();
-
- expect(wrapper.vm.receiveLabelsSuccess).toHaveBeenCalledWith([]);
- });
+ describe('when labels are loaded', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
});
- describe('handleCreateLabelClick', () => {
- it('calls actions `receiveLabelsSuccess` with empty array and `toggleDropdownContentsCreateView`', () => {
- jest.spyOn(wrapper.vm, 'receiveLabelsSuccess');
- jest.spyOn(wrapper.vm, 'toggleDropdownContentsCreateView');
-
- wrapper.vm.handleCreateLabelClick();
-
- expect(wrapper.vm.receiveLabelsSuccess).toHaveBeenCalledWith([]);
- expect(wrapper.vm.toggleDropdownContentsCreateView).toHaveBeenCalled();
- });
+ it('renders enabled search input field', async () => {
+ expect(findSearchInput().props('disabled')).toBe(false);
});
- describe('handleKeyDown', () => {
- it('decreases `currentHighlightItem` value by 1 when Up arrow key is pressed', () => {
- wrapper.setData({
- currentHighlightItem: 1,
- });
-
- wrapper.vm.handleKeyDown({
- keyCode: UP_KEY_CODE,
- });
-
- expect(wrapper.vm.currentHighlightItem).toBe(0);
- });
-
- it('increases `currentHighlightItem` value by 1 when Down arrow key is pressed', () => {
- wrapper.setData({
- currentHighlightItem: 1,
- });
-
- wrapper.vm.handleKeyDown({
- keyCode: DOWN_KEY_CODE,
- });
-
- expect(wrapper.vm.currentHighlightItem).toBe(2);
- });
-
- it('resets the search text when the Enter key is pressed', () => {
- wrapper.setData({
- currentHighlightItem: 1,
- searchKey: 'bug',
- });
-
- wrapper.vm.handleKeyDown({
- keyCode: ENTER_KEY_CODE,
- });
-
- expect(wrapper.vm.searchKey).toBe('');
- });
-
- it('calls action `updateSelectedLabels` with currently highlighted label when Enter key is pressed', () => {
- jest.spyOn(wrapper.vm, 'updateSelectedLabels').mockImplementation();
- wrapper.setData({
- currentHighlightItem: 1,
- });
-
- wrapper.vm.handleKeyDown({
- keyCode: ENTER_KEY_CODE,
- });
-
- expect(wrapper.vm.updateSelectedLabels).toHaveBeenCalledWith([
- {
- ...mockLabels[1],
- set: true,
- },
- ]);
- });
-
- it('calls action `toggleDropdownContents` when Esc key is pressed', () => {
- jest.spyOn(wrapper.vm, 'toggleDropdownContents').mockImplementation();
- wrapper.setData({
- currentHighlightItem: 1,
- });
-
- wrapper.vm.handleKeyDown({
- keyCode: ESC_KEY_CODE,
- });
-
- expect(wrapper.vm.toggleDropdownContents).toHaveBeenCalled();
- });
-
- it('calls action `scrollIntoViewIfNeeded` in next tick when any key is pressed', () => {
- jest.spyOn(wrapper.vm, 'scrollIntoViewIfNeeded').mockImplementation();
- wrapper.setData({
- currentHighlightItem: 1,
- });
-
- wrapper.vm.handleKeyDown({
- keyCode: DOWN_KEY_CODE,
- });
-
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.scrollIntoViewIfNeeded).toHaveBeenCalled();
- });
- });
+ it('does not render loading icon', async () => {
+ expect(findLoadingIcon().exists()).toBe(false);
});
- describe('handleLabelClick', () => {
- beforeEach(() => {
- jest.spyOn(wrapper.vm, 'updateSelectedLabels').mockImplementation();
- });
-
- it('calls action `updateSelectedLabels` with provided `label` param', () => {
- wrapper.vm.handleLabelClick(mockRegularLabel);
-
- expect(wrapper.vm.updateSelectedLabels).toHaveBeenCalledWith([mockRegularLabel]);
- });
+ it('renders labels list', async () => {
+ expect(findLabelsList().exists()).toBe(true);
+ expect(findLabels()).toHaveLength(2);
+ });
- it('calls action `toggleDropdownContents` when `state.allowMultiselect` is false', () => {
- jest.spyOn(wrapper.vm, 'toggleDropdownContents');
- wrapper.vm.$store.state.allowMultiselect = false;
+ it('changes highlighted label correctly on pressing down button', async () => {
+ expect(findLabels().at(0).attributes('highlight')).toBeUndefined();
- wrapper.vm.handleLabelClick(mockRegularLabel);
+ await findDropdownWrapper().trigger('keydown.down');
+ expect(findLabels().at(0).attributes('highlight')).toBe('true');
- expect(wrapper.vm.toggleDropdownContents).toHaveBeenCalled();
- });
+ await findDropdownWrapper().trigger('keydown.down');
+ expect(findLabels().at(1).attributes('highlight')).toBe('true');
+ expect(findLabels().at(0).attributes('highlight')).toBeUndefined();
});
- });
- describe('template', () => {
- it('renders gl-intersection-observer as component root', () => {
- expect(wrapper.find(GlIntersectionObserver).exists()).toBe(true);
- });
+ it('changes highlighted label correctly on pressing up button', async () => {
+ await findDropdownWrapper().trigger('keydown.down');
+ await findDropdownWrapper().trigger('keydown.down');
+ expect(findLabels().at(1).attributes('highlight')).toBe('true');
- it('renders gl-loading-icon component when `labelsFetchInProgress` prop is true', () => {
- wrapper.vm.$store.dispatch('requestLabels');
+ await findDropdownWrapper().trigger('keydown.up');
+ expect(findLabels().at(0).attributes('highlight')).toBe('true');
+ });
- return wrapper.vm.$nextTick(() => {
- const loadingIconEl = findLoadingIcon();
+ it('changes label selected state when Enter is pressed', async () => {
+ expect(findLabels().at(0).attributes('islabelset')).toBeUndefined();
+ await findDropdownWrapper().trigger('keydown.down');
+ await findDropdownWrapper().trigger('keydown.enter');
- expect(loadingIconEl.exists()).toBe(true);
- expect(loadingIconEl.attributes('class')).toContain('labels-fetch-loading');
- });
+ expect(findLabels().at(0).attributes('islabelset')).toBe('true');
});
- it('renders label search input element', () => {
- const searchInputEl = wrapper.find(GlSearchBoxByType);
+ it('emits `closeDropdown event` when Esc button is pressed', () => {
+ findDropdownWrapper().trigger('keydown.esc');
- expect(searchInputEl.exists()).toBe(true);
+ expect(wrapper.emitted('closeDropdown')).toEqual([[selectedLabels]]);
});
+ });
- it('renders label elements for all labels', () => {
- expect(wrapper.findAll(LabelItem)).toHaveLength(mockLabels.length);
+ it('when search returns 0 results', async () => {
+ createComponent({
+ queryHandler: jest.fn().mockResolvedValue({
+ data: {
+ workspace: {
+ labels: {
+ nodes: [],
+ },
+ },
+ },
+ }),
});
+ findSearchInput().vm.$emit('input', '123');
+ await waitForPromises();
+ await nextTick();
- it('renders label element with `highlight` set to true when value of `currentHighlightItem` is more than -1', () => {
- wrapper.setData({
- currentHighlightItem: 0,
- });
+ expect(findNoResultsMessage().isVisible()).toBe(true);
+ });
- return wrapper.vm.$nextTick(() => {
- const labelItemEl = findDropdownContent().find(LabelItem);
+ it('calls `createFlash` when fetching labels failed', async () => {
+ createComponent({ queryHandler: jest.fn().mockRejectedValue('Houston, we have a problem!') });
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+ await waitForPromises();
+ expect(createFlash).toHaveBeenCalled();
+ });
- expect(labelItemEl.attributes('highlight')).toBe('true');
- });
- });
+ it('does not render footer on standalone dropdown', () => {
+ createComponent({ injected: { variant: DropdownVariant.Standalone } });
- it('renders element containing "No matching results" when `searchKey` does not match with any label', () => {
- wrapper.setData({
- searchKey: 'abc',
- });
+ expect(findDropdownFooter().exists()).toBe(false);
+ });
- return wrapper.vm.$nextTick(() => {
- const noMatchEl = findDropdownContent().find('li');
+ it('renders footer on sidebar dropdown', () => {
+ createComponent();
- expect(noMatchEl.isVisible()).toBe(true);
- expect(noMatchEl.text()).toContain('No matching results');
- });
- });
+ expect(findDropdownFooter().exists()).toBe(true);
+ });
- it('renders empty content while loading', () => {
- wrapper.vm.$store.state.labelsFetchInProgress = true;
+ it('renders footer on embedded dropdown', () => {
+ createComponent({ injected: { variant: DropdownVariant.Embedded } });
- return wrapper.vm.$nextTick(() => {
- const dropdownContent = findDropdownContent();
- const loadingIcon = findLoadingIcon();
+ expect(findDropdownFooter().exists()).toBe(true);
+ });
- expect(dropdownContent.exists()).toBe(true);
- expect(dropdownContent.isVisible()).toBe(true);
- expect(loadingIcon.exists()).toBe(true);
- expect(loadingIcon.isVisible()).toBe(true);
- });
- });
+ it('does not render create label button if `allowLabelCreate` is false', () => {
+ createComponent({ injected: { allowLabelCreate: false } });
- it('renders footer list items', () => {
- const footerLinks = findDropdownFooter().findAll(GlLink);
- const createLabelLink = footerLinks.at(0);
- const manageLabelsLink = footerLinks.at(1);
+ expect(findCreateLabelButton().exists()).toBe(false);
+ });
- expect(createLabelLink.exists()).toBe(true);
- expect(createLabelLink.text()).toBe('Create label');
- expect(manageLabelsLink.exists()).toBe(true);
- expect(manageLabelsLink.text()).toBe('Manage labels');
+ describe('when `allowLabelCreate` is true', () => {
+ beforeEach(() => {
+ createComponent();
});
- it('does not render "Create label" footer link when `state.allowLabelCreate` is `false`', () => {
- wrapper.vm.$store.state.allowLabelCreate = false;
-
- return wrapper.vm.$nextTick(() => {
- const createLabelLink = findDropdownFooter().findAll(GlLink).at(0);
-
- expect(createLabelLink.text()).not.toBe('Create label');
- });
+ it('renders create label button', () => {
+ expect(findCreateLabelButton().exists()).toBe(true);
});
- it('does not render footer list items when `state.variant` is "standalone"', () => {
- createComponent({ ...mockConfig, variant: 'standalone' });
- expect(findDropdownFooter().exists()).toBe(false);
- });
+ it('emits `toggleDropdownContentsCreateView` event on create label button click', () => {
+ findCreateLabelButton().vm.$emit('click');
- it('renders footer list items when `state.variant` is "embedded"', () => {
- expect(findDropdownFooter().exists()).toBe(true);
+ expect(wrapper.emitted('toggleDropdownContentsCreateView')).toEqual([[]]);
});
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js
index 8273bbdf7a7..3c2fd0c5acc 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js
@@ -5,7 +5,7 @@ import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_w
import DropdownContents from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents.vue';
import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_widget/store';
-import { mockConfig } from './mock_data';
+import { mockConfig, mockLabels } from './mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -19,6 +19,11 @@ const createComponent = (initialState = mockConfig, defaultProps = {}) => {
propsData: {
...defaultProps,
labelsCreateTitle: 'test',
+ selectedLabels: mockLabels,
+ allowMultiselect: true,
+ labelsListTitle: 'Assign labels',
+ footerCreateLabelTitle: 'create',
+ footerManageLabelTitle: 'manage',
},
localVue,
store,
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
index 66971446f47..e17dfd93efc 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
@@ -50,58 +50,6 @@ describe('LabelsSelectRoot', () => {
});
describe('methods', () => {
- describe('handleVuexActionDispatch', () => {
- it('calls `handleDropdownClose` when params `action.type` is `toggleDropdownContents` and state has `showDropdownButton` & `showDropdownContents` props `false`', () => {
- createComponent();
- jest.spyOn(wrapper.vm, 'handleDropdownClose').mockImplementation();
-
- wrapper.vm.handleVuexActionDispatch(
- { type: 'toggleDropdownContents' },
- {
- showDropdownButton: false,
- showDropdownContents: false,
- labels: [{ id: 1 }, { id: 2, touched: true }],
- },
- );
-
- expect(wrapper.vm.handleDropdownClose).toHaveBeenCalledWith(
- expect.arrayContaining([
- {
- id: 2,
- touched: true,
- },
- ]),
- );
- });
-
- it('calls `handleDropdownClose` with state.labels filterd using `set` prop when dropdown variant is `embedded`', () => {
- createComponent({
- ...mockConfig,
- variant: 'embedded',
- });
-
- jest.spyOn(wrapper.vm, 'handleDropdownClose').mockImplementation();
-
- wrapper.vm.handleVuexActionDispatch(
- { type: 'toggleDropdownContents' },
- {
- showDropdownButton: false,
- showDropdownContents: false,
- labels: [{ id: 1 }, { id: 2, set: true }],
- },
- );
-
- expect(wrapper.vm.handleDropdownClose).toHaveBeenCalledWith(
- expect.arrayContaining([
- {
- id: 2,
- set: true,
- },
- ]),
- );
- });
- });
-
describe('handleDropdownClose', () => {
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js
index 9e29030fb56..5dd8fc1b8b2 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js
@@ -48,6 +48,8 @@ export const mockConfig = {
labelsManagePath: '/gitlab-org/my-project/-/labels',
labelsFilterBasePath: '/gitlab-org/my-project/issues',
labelsFilterParam: 'label_name',
+ footerCreateLabelTitle: 'create',
+ footerManageLabelTitle: 'manage',
};
export const mockSuggestedColors = {
@@ -91,3 +93,26 @@ export const createLabelSuccessfulResponse = {
},
},
};
+
+export const labelsQueryResponse = {
+ data: {
+ workspace: {
+ labels: {
+ nodes: [
+ {
+ color: '#330066',
+ description: null,
+ id: 'gid://gitlab/ProjectLabel/1',
+ title: 'Label1',
+ },
+ {
+ color: '#2f7b2e',
+ description: null,
+ id: 'gid://gitlab/ProjectLabel/2',
+ title: 'Label2',
+ },
+ ],
+ },
+ },
+ },
+};
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/actions_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/actions_spec.js
index 27de7de2411..ee905410ffa 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/actions_spec.js
@@ -1,8 +1,4 @@
-import MockAdapter from 'axios-mock-adapter';
-
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
-import axios from '~/lib/utils/axios_utils';
import * as actions from '~/vue_shared/components/sidebar/labels_select_widget/store/actions';
import * as types from '~/vue_shared/components/sidebar/labels_select_widget/store/mutation_types';
import defaultState from '~/vue_shared/components/sidebar/labels_select_widget/store/state';
@@ -72,90 +68,6 @@ describe('LabelsSelect Actions', () => {
});
});
- describe('requestLabels', () => {
- it('sets value of `state.labelsFetchInProgress` to `true`', (done) => {
- testAction(actions.requestLabels, {}, state, [{ type: types.REQUEST_LABELS }], [], done);
- });
- });
-
- describe('receiveLabelsSuccess', () => {
- it('sets provided labels to `state.labels`', (done) => {
- const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
-
- testAction(
- actions.receiveLabelsSuccess,
- labels,
- state,
- [{ type: types.RECEIVE_SET_LABELS_SUCCESS, payload: labels }],
- [],
- done,
- );
- });
- });
-
- describe('receiveLabelsFailure', () => {
- it('sets value `state.labelsFetchInProgress` to `false`', (done) => {
- testAction(
- actions.receiveLabelsFailure,
- {},
- state,
- [{ type: types.RECEIVE_SET_LABELS_FAILURE }],
- [],
- done,
- );
- });
-
- it('shows flash error', () => {
- actions.receiveLabelsFailure({ commit: () => {} });
-
- expect(createFlash).toHaveBeenCalledWith({ message: 'Error fetching labels.' });
- });
- });
-
- describe('fetchLabels', () => {
- let mock;
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- state.labelsFetchPath = 'labels.json';
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- describe('on success', () => {
- it('dispatches `requestLabels` & `receiveLabelsSuccess` actions', (done) => {
- const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
- mock.onGet(/labels.json/).replyOnce(200, labels);
-
- testAction(
- actions.fetchLabels,
- {},
- state,
- [],
- [{ type: 'requestLabels' }, { type: 'receiveLabelsSuccess', payload: labels }],
- done,
- );
- });
- });
-
- describe('on failure', () => {
- it('dispatches `requestLabels` & `receiveLabelsFailure` actions', (done) => {
- mock.onGet(/labels.json/).replyOnce(500, {});
-
- testAction(
- actions.fetchLabels,
- {},
- state,
- [],
- [{ type: 'requestLabels' }, { type: 'receiveLabelsFailure' }],
- done,
- );
- });
- });
- });
-
describe('updateSelectedLabels', () => {
it('updates `state.labels` based on provided `labels` param', (done) => {
const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/mutations_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/mutations_spec.js
index 9e965cb33e8..1f0e0eee420 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/mutations_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/mutations_spec.js
@@ -67,58 +67,6 @@ describe('LabelsSelect Mutations', () => {
});
});
- describe(`${types.REQUEST_LABELS}`, () => {
- it('sets value of `state.labelsFetchInProgress` to true', () => {
- const state = {
- labelsFetchInProgress: false,
- };
- mutations[types.REQUEST_LABELS](state);
-
- expect(state.labelsFetchInProgress).toBe(true);
- });
- });
-
- describe(`${types.RECEIVE_SET_LABELS_SUCCESS}`, () => {
- const selectedLabels = [{ id: 2 }, { id: 4 }];
- const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
-
- it('sets value of `state.labelsFetchInProgress` to false', () => {
- const state = {
- selectedLabels,
- labelsFetchInProgress: true,
- };
- mutations[types.RECEIVE_SET_LABELS_SUCCESS](state, labels);
-
- expect(state.labelsFetchInProgress).toBe(false);
- });
-
- it('sets provided `labels` to `state.labels` along with `set` prop based on `state.selectedLabels`', () => {
- const selectedLabelIds = selectedLabels.map((label) => label.id);
- const state = {
- selectedLabels,
- labelsFetchInProgress: true,
- };
- mutations[types.RECEIVE_SET_LABELS_SUCCESS](state, labels);
-
- state.labels.forEach((label) => {
- if (selectedLabelIds.includes(label.id)) {
- expect(label.set).toBe(true);
- }
- });
- });
- });
-
- describe(`${types.RECEIVE_SET_LABELS_FAILURE}`, () => {
- it('sets value of `state.labelsFetchInProgress` to false', () => {
- const state = {
- labelsFetchInProgress: true,
- };
- mutations[types.RECEIVE_SET_LABELS_FAILURE](state);
-
- expect(state.labelsFetchInProgress).toBe(false);
- });
- });
-
describe(`${types.UPDATE_SELECTED_LABELS}`, () => {
let labels;
diff --git a/spec/frontend/vue_shared/components/url_sync_spec.js b/spec/frontend/vue_shared/components/url_sync_spec.js
index 86bbc146c5f..aefe6a5c3e8 100644
--- a/spec/frontend/vue_shared/components/url_sync_spec.js
+++ b/spec/frontend/vue_shared/components/url_sync_spec.js
@@ -1,5 +1,4 @@
import { shallowMount } from '@vue/test-utils';
-import setWindowLocation from 'helpers/set_window_location_helper';
import { historyPushState } from '~/lib/utils/common_utils';
import { mergeUrlParams } from '~/lib/utils/url_utility';
import UrlSyncComponent from '~/vue_shared/components/url_sync.vue';
@@ -15,9 +14,6 @@ jest.mock('~/lib/utils/common_utils', () => ({
describe('url sync component', () => {
let wrapper;
const mockQuery = { group_id: '5014437163714', project_ids: ['5014437608314'] };
- const TEST_HOST = 'http://testhost/';
-
- setWindowLocation(TEST_HOST);
const findButton = () => wrapper.find('button');
@@ -35,7 +31,9 @@ describe('url sync component', () => {
const expectUrlSync = (query, times, mergeUrlParamsReturnValue) => {
expect(mergeUrlParams).toHaveBeenCalledTimes(times);
- expect(mergeUrlParams).toHaveBeenCalledWith(query, TEST_HOST, { spreadArrays: true });
+ expect(mergeUrlParams).toHaveBeenCalledWith(query, window.location.href, {
+ spreadArrays: true,
+ });
expect(historyPushState).toHaveBeenCalledTimes(times);
expect(historyPushState).toHaveBeenCalledWith(mergeUrlParamsReturnValue);
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
index d62c4a98b10..d3fec680b54 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
@@ -104,4 +104,15 @@ describe('User Avatar Link Component', () => {
);
});
});
+
+ describe('lazy', () => {
+ it('passes lazy prop to avatar image', () => {
+ createWrapper({
+ username: '',
+ lazy: true,
+ });
+
+ expect(wrapper.find(UserAvatarImage).props('lazy')).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index 0fd4d0dab87..5fe4eeb6061 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -85,6 +85,10 @@ describe('Web IDE link component', () => {
expectedActions: [ACTION_WEB_IDE, ACTION_EDIT],
},
{
+ props: { webIdeText: 'Test Web IDE' },
+ expectedActions: [{ ...ACTION_WEB_IDE_EDIT_FORK, text: 'Test Web IDE' }, ACTION_EDIT],
+ },
+ {
props: { isFork: true },
expectedActions: [ACTION_WEB_IDE_EDIT_FORK, ACTION_EDIT],
},
@@ -105,6 +109,10 @@ describe('Web IDE link component', () => {
expectedActions: [ACTION_WEB_IDE, ACTION_EDIT, ACTION_GITPOD_ENABLE],
},
{
+ props: { showEditButton: false, showGitpodButton: true, gitpodText: 'Test Gitpod' },
+ expectedActions: [ACTION_WEB_IDE, { ...ACTION_GITPOD_ENABLE, text: 'Test Gitpod' }],
+ },
+ {
props: { showEditButton: false },
expectedActions: [ACTION_WEB_IDE],
},
diff --git a/spec/frontend/vue_shared/directives/autofocusonshow_spec.js b/spec/frontend/vue_shared/directives/autofocusonshow_spec.js
index 1c9e89f99e9..59ce9f086c3 100644
--- a/spec/frontend/vue_shared/directives/autofocusonshow_spec.js
+++ b/spec/frontend/vue_shared/directives/autofocusonshow_spec.js
@@ -1,4 +1,3 @@
-import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
import autofocusonshow from '~/vue_shared/directives/autofocusonshow';
/**
@@ -7,8 +6,6 @@ import autofocusonshow from '~/vue_shared/directives/autofocusonshow';
* on underlying DOM methods.
*/
describe('AutofocusOnShow directive', () => {
- useMockIntersectionObserver();
-
describe('with input invisible on component render', () => {
let el;
diff --git a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
index bef538e1ff1..4d579fa61df 100644
--- a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
+++ b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
@@ -22,7 +22,7 @@ import {
REPORT_TYPE_SAST,
REPORT_TYPE_SECRET_DETECTION,
} from '~/vue_shared/security_reports/constants';
-import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/queries/security_report_merge_request_download_paths.query.graphql';
+import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/graphql/queries/security_report_merge_request_download_paths.query.graphql';
import SecurityReportsApp from '~/vue_shared/security_reports/security_reports_app.vue';
jest.mock('~/flash');
diff --git a/spec/frontend_integration/diffs/diffs_interopability_spec.js b/spec/frontend_integration/diffs/diffs_interopability_spec.js
index 448641ed834..064e3d21180 100644
--- a/spec/frontend_integration/diffs/diffs_interopability_spec.js
+++ b/spec/frontend_integration/diffs/diffs_interopability_spec.js
@@ -1,4 +1,5 @@
import { waitFor } from '@testing-library/dom';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import initDiffsApp from '~/diffs';
import { createStore } from '~/mr_notes/stores';
@@ -111,9 +112,7 @@ describe('diffs third party interoperability', () => {
${'parallel view right side'} | ${'parallel'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content.right-side'} | ${EXPECT_PARALLEL_RIGHT_SIDE}
`('$desc', ({ view, rowSelector, codeSelector, expectation }) => {
beforeEach(async () => {
- global.jsdom.reconfigure({
- url: `${TEST_HOST}/${TEST_BASE_URL}/diffs?view=${view}`,
- });
+ setWindowLocation(`${TEST_HOST}/${TEST_BASE_URL}/diffs?view=${view}`);
vm = startDiffsApp();
diff --git a/spec/frontend_integration/ide/helpers/start.js b/spec/frontend_integration/ide/helpers/start.js
index cc6abd9e01f..4451c1ee946 100644
--- a/spec/frontend_integration/ide/helpers/start.js
+++ b/spec/frontend_integration/ide/helpers/start.js
@@ -1,5 +1,6 @@
/* global monaco */
+import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import { initIde } from '~/ide';
import extendStore from '~/ide/stores/extend';
@@ -9,9 +10,7 @@ export default (container, { isRepoEmpty = false, path = '', mrId = '' } = {}) =
const projectName = isRepoEmpty ? 'lorem-ipsum-empty' : 'lorem-ipsum';
const pathSuffix = mrId ? `merge_requests/${mrId}` : `tree/master/-/${path}`;
- global.jsdom.reconfigure({
- url: `${TEST_HOST}/-/ide/project/gitlab-test/${projectName}/${pathSuffix}`,
- });
+ setWindowLocation(`${TEST_HOST}/-/ide/project/gitlab-test/${projectName}/${pathSuffix}`);
const el = document.createElement('div');
Object.assign(el.dataset, IDE_DATASET);
diff --git a/spec/graphql/features/authorization_spec.rb b/spec/graphql/features/authorization_spec.rb
index 0dc3a9c85e7..faf19104731 100644
--- a/spec/graphql/features/authorization_spec.rb
+++ b/spec/graphql/features/authorization_spec.rb
@@ -105,7 +105,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
describe 'with a single permission' do
let(:type) do
type_factory do |type|
- type.field :name, GraphQL::STRING_TYPE, null: true, authorize: permission_single
+ type.field :name, GraphQL::Types::String, null: true, authorize: permission_single
end
end
@@ -124,7 +124,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
let(:type) do
permissions = permission_collection
type_factory do |type|
- type.field :name, GraphQL::STRING_TYPE,
+ type.field :name, GraphQL::Types::String,
null: true,
authorize: permissions
end
@@ -332,7 +332,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
type_factory do |type|
type.graphql_name 'FakeIssueType'
type.authorize :read_issue
- type.field :id, GraphQL::ID_TYPE, null: false
+ type.field :id, GraphQL::Types::ID, null: false
end
end
diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb
index 06505536b09..3fa0dc95126 100644
--- a/spec/graphql/gitlab_schema_spec.rb
+++ b/spec/graphql/gitlab_schema_spec.rb
@@ -36,75 +36,66 @@ RSpec.describe GitlabSchema do
end
describe '.execute' do
- context 'with different types of users' do
- context 'when no context' do
- it 'returns DEFAULT_MAX_COMPLEXITY' do
- expect(GraphQL::Schema)
- .to receive(:execute)
- .with('query', hash_including(max_complexity: GitlabSchema::DEFAULT_MAX_COMPLEXITY))
-
- described_class.execute('query')
+ describe 'setting query `max_complexity` and `max_depth`' do
+ subject(:result) { described_class.execute('query', **kwargs).query }
+
+ shared_examples 'sets default limits' do
+ specify do
+ expect(result).to have_attributes(
+ max_complexity: GitlabSchema::DEFAULT_MAX_COMPLEXITY,
+ max_depth: GitlabSchema::DEFAULT_MAX_DEPTH
+ )
end
end
- context 'when no user' do
- it 'returns DEFAULT_MAX_COMPLEXITY' do
- expect(GraphQL::Schema)
- .to receive(:execute)
- .with('query', hash_including(max_complexity: GitlabSchema::DEFAULT_MAX_COMPLEXITY))
+ context 'with no context' do
+ let(:kwargs) { {} }
- described_class.execute('query', context: {})
- end
-
- it 'returns DEFAULT_MAX_DEPTH' do
- expect(GraphQL::Schema)
- .to receive(:execute)
- .with('query', hash_including(max_depth: GitlabSchema::DEFAULT_MAX_DEPTH))
-
- described_class.execute('query', context: {})
- end
+ include_examples 'sets default limits'
end
- context 'when a logged in user' do
- it 'returns AUTHENTICATED_COMPLEXITY' do
- expect(GraphQL::Schema).to receive(:execute)
- .with('query', hash_including(max_complexity: GitlabSchema::AUTHENTICATED_COMPLEXITY))
+ context 'with no :current_user' do
+ let(:kwargs) { { context: {} } }
- described_class.execute('query', context: { current_user: user })
- end
+ include_examples 'sets default limits'
+ end
- it 'returns AUTHENTICATED_MAX_DEPTH' do
- expect(GraphQL::Schema).to receive(:execute)
- .with('query', hash_including(max_depth: GitlabSchema::AUTHENTICATED_MAX_DEPTH))
+ context 'with anonymous user' do
+ let(:kwargs) { { context: { current_user: nil } } }
- described_class.execute('query', context: { current_user: user })
- end
+ include_examples 'sets default limits'
end
- context 'when an admin user' do
- it 'returns ADMIN_COMPLEXITY' do
- user = build :user, :admin
-
- expect(GraphQL::Schema).to receive(:execute)
- .with('query', hash_including(max_complexity: GitlabSchema::ADMIN_COMPLEXITY))
+ context 'with a logged in user' do
+ let(:kwargs) { { context: { current_user: user } } }
- described_class.execute('query', context: { current_user: user })
+ it 'sets authenticated user limits' do
+ expect(result).to have_attributes(
+ max_complexity: GitlabSchema::AUTHENTICATED_MAX_COMPLEXITY,
+ max_depth: GitlabSchema::AUTHENTICATED_MAX_DEPTH
+ )
end
end
- context 'when max_complexity passed on the query' do
- it 'returns what was passed on the query' do
- expect(GraphQL::Schema).to receive(:execute).with('query', hash_including(max_complexity: 1234))
+ context 'with an admin user' do
+ let(:kwargs) { { context: { current_user: build(:user, :admin) } } }
- described_class.execute('query', max_complexity: 1234)
+ it 'sets admin/authenticated user limits' do
+ expect(result).to have_attributes(
+ max_complexity: GitlabSchema::ADMIN_MAX_COMPLEXITY,
+ max_depth: GitlabSchema::AUTHENTICATED_MAX_DEPTH
+ )
end
end
- context 'when max_depth passed on the query' do
- it 'returns what was passed on the query' do
- expect(GraphQL::Schema).to receive(:execute).with('query', hash_including(max_depth: 1234))
+ context 'when limits passed as kwargs' do
+ let(:kwargs) { { max_complexity: 1234, max_depth: 4321 } }
- described_class.execute('query', max_depth: 1234)
+ it 'sets limits from the kwargs' do
+ expect(result).to have_attributes(
+ max_complexity: 1234,
+ max_depth: 4321
+ )
end
end
end
diff --git a/spec/graphql/mutations/base_mutation_spec.rb b/spec/graphql/mutations/base_mutation_spec.rb
new file mode 100644
index 00000000000..7939fadb37b
--- /dev/null
+++ b/spec/graphql/mutations/base_mutation_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Mutations::BaseMutation do
+ include GraphqlHelpers
+
+ describe 'argument nullability' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:context) { { current_user: user } }
+
+ subject(:mutation) { mutation_class.new(object: nil, context: context, field: nil) }
+
+ describe 'when using a mutation with correct argument declarations' do
+ context 'when argument is nullable and required' do
+ let(:mutation_class) do
+ Class.new(described_class) do
+ argument :foo, GraphQL::Types::String, required: :nullable
+ end
+ end
+
+ specify do
+ expect { subject.ready? }.to raise_error(ArgumentError, /must be provided: foo/)
+ end
+
+ specify do
+ expect { subject.ready?(foo: nil) }.not_to raise_error
+ end
+
+ specify do
+ expect { subject.ready?(foo: "bar") }.not_to raise_error
+ end
+ end
+
+ context 'when argument is required and NOT nullable' do
+ let(:mutation_class) do
+ Class.new(described_class) do
+ argument :foo, GraphQL::Types::String, required: true
+ end
+ end
+
+ specify do
+ expect { subject.ready? }.to raise_error(ArgumentError, /must be provided/)
+ end
+
+ specify do
+ expect { subject.ready?(foo: nil) }.to raise_error(ArgumentError, /must be provided/)
+ end
+
+ specify do
+ expect { subject.ready?(foo: "bar") }.not_to raise_error
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/ci/runner/delete_spec.rb b/spec/graphql/mutations/ci/runner/delete_spec.rb
index 82873c96c3e..27e8236d593 100644
--- a/spec/graphql/mutations/ci/runner/delete_spec.rb
+++ b/spec/graphql/mutations/ci/runner/delete_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Mutations::Ci::Runner::Delete do
let(:mutation_params) { {} }
it 'raises an error' do
- expect { subject }.to raise_error(ArgumentError, "missing keyword: :id")
+ expect { subject }.to raise_error(ArgumentError, "Arguments must be provided: id")
end
end
diff --git a/spec/graphql/mutations/ci/runner/update_spec.rb b/spec/graphql/mutations/ci/runner/update_spec.rb
index 3db0d552a05..83150c3d7f6 100644
--- a/spec/graphql/mutations/ci/runner/update_spec.rb
+++ b/spec/graphql/mutations/ci/runner/update_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Mutations::Ci::Runner::Update do
let(:mutation_params) { {} }
it 'raises an error' do
- expect { subject }.to raise_error(ArgumentError, "missing keyword: :id")
+ expect { subject }.to raise_error(ArgumentError, "Arguments must be provided: id")
end
end
diff --git a/spec/graphql/mutations/design_management/delete_spec.rb b/spec/graphql/mutations/design_management/delete_spec.rb
index 3efa865c64b..93fff5e5103 100644
--- a/spec/graphql/mutations/design_management/delete_spec.rb
+++ b/spec/graphql/mutations/design_management/delete_spec.rb
@@ -86,9 +86,9 @@ RSpec.describe Mutations::DesignManagement::Delete do
end
end
- it 'runs no more than 28 queries' do
+ it 'runs no more than 29 queries' do
filenames.each(&:present?) # ignore setup
- # Queries: as of 2019-08-28
+ # Queries: as of 2021-07-22
# -------------
# 01. routing query
# 02. find project by id
@@ -100,25 +100,26 @@ RSpec.describe Mutations::DesignManagement::Delete do
# 09. find namespace by id
# 10. find group namespace by id
# 11. project.authorizations for user (same query as 5)
- # 12. project.project_features (same query as 3)
- # 13. project.authorizations for user (same query as 5)
- # 14. current designs by filename and issue
- # 15, 16 project.authorizations for user (same query as 5)
- # 17. find route by id and source_type
+ # 12. find user by id
+ # 13. project.project_features (same query as 3)
+ # 14. project.authorizations for user (same query as 5)
+ # 15. current designs by filename and issue
+ # 16, 17 project.authorizations for user (same query as 5)
+ # 18. find route by id and source_type
# ------------- our queries are below:
- # 18. start transaction 1
- # 19. start transaction 2
- # 20. find version by sha and issue
- # 21. exists version with sha and issue?
- # 22. leave transaction 2
- # 23. create version with sha and issue
- # 24. create design-version links
- # 25. validate version.actions.present?
- # 26. validate version.issue.present?
- # 27. validate version.sha is unique
- # 28. leave transaction 1
+ # 19. start transaction 1
+ # 20. start transaction 2
+ # 21. find version by sha and issue
+ # 22. exists version with sha and issue?
+ # 23. leave transaction 2
+ # 24. create version with sha and issue
+ # 25. create design-version links
+ # 26. validate version.actions.present?
+ # 27. validate version.issue.present?
+ # 28. validate version.sha is unique
+ # 29. leave transaction 1
#
- expect { run_mutation }.not_to exceed_query_limit(28)
+ expect { run_mutation }.not_to exceed_query_limit(29)
end
end
diff --git a/spec/graphql/mutations/groups/update_spec.rb b/spec/graphql/mutations/groups/update_spec.rb
new file mode 100644
index 00000000000..2118134e8e6
--- /dev/null
+++ b/spec/graphql/mutations/groups/update_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Groups::Update do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ let(:params) { { full_path: group.full_path } }
+
+ specify { expect(described_class).to require_graphql_authorizations(:admin_group) }
+
+ describe '#resolve' do
+ subject { described_class.new(object: group, context: { current_user: user }, field: nil).resolve(**params) }
+
+ RSpec.shared_examples 'updating the group shared runners setting' do
+ it 'updates the group shared runners setting' do
+ expect { subject }
+ .to change { group.reload.shared_runners_setting }.from('enabled').to('disabled_and_unoverridable')
+ end
+
+ it 'returns no errors' do
+ expect(subject).to eq(errors: [], group: group)
+ end
+
+ context 'with invalid params' do
+ let_it_be(:params) { { full_path: group.full_path, shared_runners_setting: 'inexistent_setting' } }
+
+ it 'doesn\'t update the shared_runners_setting' do
+ expect { subject }
+ .not_to change { group.reload.shared_runners_setting }
+ end
+
+ it 'returns an error' do
+ expect(subject).to eq(
+ group: nil,
+ errors: ["Update shared runners state must be one of: #{::Namespace::SHARED_RUNNERS_SETTINGS.join(', ')}"]
+ )
+ end
+ end
+ end
+
+ RSpec.shared_examples 'denying access to group shared runners setting' do
+ it 'raises Gitlab::Graphql::Errors::ResourceNotAvailable' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'changing shared runners setting' do
+ let_it_be(:params) do
+ { full_path: group.full_path,
+ shared_runners_setting: 'disabled_and_unoverridable' }
+ end
+
+ where(:user_role, :shared_examples_name) do
+ :owner | 'updating the group shared runners setting'
+ :developer | 'denying access to group shared runners setting'
+ :reporter | 'denying access to group shared runners setting'
+ :guest | 'denying access to group shared runners setting'
+ :anonymous | 'denying access to group shared runners setting'
+ end
+
+ with_them do
+ before do
+ group.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/issues/update_spec.rb b/spec/graphql/mutations/issues/update_spec.rb
index 80f43338bb5..bb57ad4c404 100644
--- a/spec/graphql/mutations/issues/update_spec.rb
+++ b/spec/graphql/mutations/issues/update_spec.rb
@@ -131,6 +131,28 @@ RSpec.describe Mutations::Issues::Update do
expect(issue.reload.labels).to match_array([project_label, label_2])
end
+
+ context 'when setting labels with label_ids' do
+ it 'replaces existing labels with provided ones' do
+ expect(issue.reload.labels).to match_array([project_label])
+
+ mutation_params[:label_ids] = [label_1.id, label_2.id]
+
+ subject
+
+ expect(issue.reload.labels).to match_array([label_1, label_2])
+ end
+
+ it 'raises error when label_ids is combined with remove_label_ids' do
+ expect { mutation.ready?(label_ids: [label_1.id, label_2.id], remove_label_ids: [label_1.id]) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'labelIds is mutually exclusive with any of addLabelIds or removeLabelIds')
+ end
+
+ it 'raises error when label_ids is combined with add_label_ids' do
+ expect { mutation.ready?(label_ids: [label_1.id, label_2.id], add_label_ids: [label_2.id]) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'labelIds is mutually exclusive with any of addLabelIds or removeLabelIds')
+ end
+ end
end
context 'when changing type' do
diff --git a/spec/graphql/resolvers/base_resolver_spec.rb b/spec/graphql/resolvers/base_resolver_spec.rb
index 8d2ae238bfe..d77a0b6242e 100644
--- a/spec/graphql/resolvers/base_resolver_spec.rb
+++ b/spec/graphql/resolvers/base_resolver_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe Resolvers::BaseResolver do
let(:resolver) do
Class.new(described_class) do
- argument :test, ::GraphQL::INT_TYPE, required: false
- type [::GraphQL::INT_TYPE], null: true
+ argument :test, ::GraphQL::Types::Int, required: false
+ type [::GraphQL::Types::Int], null: true
def resolve(test: 100)
process(object)
@@ -22,7 +22,7 @@ RSpec.describe Resolvers::BaseResolver do
let(:last_resolver) do
Class.new(described_class) do
- type [::GraphQL::INT_TYPE], null: true
+ type [::GraphQL::Types::Int], null: true
def resolve(**args)
[1, 2]
@@ -36,11 +36,11 @@ RSpec.describe Resolvers::BaseResolver do
context 'for a connection of scalars' do
let(:resolver) do
Class.new(described_class) do
- type ::GraphQL::INT_TYPE.connection_type, null: true
+ type ::GraphQL::Types::Int.connection_type, null: true
end
end
- it { is_expected.to eq(::GraphQL::INT_TYPE) }
+ it { is_expected.to eq(::GraphQL::Types::Int) }
end
context 'for a connection of objects' do
@@ -64,21 +64,21 @@ RSpec.describe Resolvers::BaseResolver do
context 'for a list type' do
let(:resolver) do
Class.new(described_class) do
- type [::GraphQL::STRING_TYPE], null: true
+ type [::GraphQL::Types::String], null: true
end
end
- it { is_expected.to eq(::GraphQL::STRING_TYPE) }
+ it { is_expected.to eq(::GraphQL::Types::String) }
end
context 'for a scalar type' do
let(:resolver) do
Class.new(described_class) do
- type ::GraphQL::BOOLEAN_TYPE, null: true
+ type ::GraphQL::Types::Boolean, null: true
end
end
- it { is_expected.to eq(::GraphQL::BOOLEAN_TYPE) }
+ it { is_expected.to eq(::GraphQL::Types::Boolean) }
end
end
@@ -88,7 +88,7 @@ RSpec.describe Resolvers::BaseResolver do
end
it 'has the correct (singular) type' do
- expect(resolver.single.type).to eq(::GraphQL::INT_TYPE)
+ expect(resolver.single.type).to eq(::GraphQL::Types::Int)
end
it 'returns the same subclass every time' do
@@ -105,10 +105,10 @@ RSpec.describe Resolvers::BaseResolver do
describe '.when_single' do
let(:resolver) do
Class.new(described_class) do
- type [::GraphQL::INT_TYPE], null: true
+ type [::GraphQL::Types::Int], null: true
when_single do
- argument :foo, ::GraphQL::INT_TYPE, required: true
+ argument :foo, ::GraphQL::Types::Int, required: true
end
def resolve(foo: 1)
@@ -138,14 +138,14 @@ RSpec.describe Resolvers::BaseResolver do
context 'multiple when_single blocks' do
let(:resolver) do
Class.new(described_class) do
- type [::GraphQL::INT_TYPE], null: true
+ type [::GraphQL::Types::Int], null: true
when_single do
- argument :foo, ::GraphQL::INT_TYPE, required: true
+ argument :foo, ::GraphQL::Types::Int, required: true
end
when_single do
- argument :bar, ::GraphQL::INT_TYPE, required: true
+ argument :bar, ::GraphQL::Types::Int, required: true
end
def resolve(foo: 1, bar: 2)
@@ -168,7 +168,7 @@ RSpec.describe Resolvers::BaseResolver do
let(:subclass) do
Class.new(resolver) do
when_single do
- argument :inc, ::GraphQL::INT_TYPE, required: true
+ argument :inc, ::GraphQL::Types::Int, required: true
end
def resolve(foo:, inc:)
@@ -194,7 +194,7 @@ RSpec.describe Resolvers::BaseResolver do
context 'when the resolver returns early' do
let(:resolver) do
Class.new(described_class) do
- type [::GraphQL::STRING_TYPE], null: true
+ type [::GraphQL::Types::String], null: true
def ready?(**args)
[false, %w[early return]]
@@ -237,14 +237,14 @@ RSpec.describe Resolvers::BaseResolver do
context 'when field is a connection' do
it 'increases complexity based on arguments' do
- field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE.connection_type, resolver_class: described_class, null: false, max_page_size: 1)
+ field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: described_class, null: false, max_page_size: 1)
expect(field.to_graphql.complexity.call({}, { sort: 'foo' }, 1)).to eq 3
expect(field.to_graphql.complexity.call({}, { search: 'foo' }, 1)).to eq 7
end
it 'does not increase complexity when filtering by iids' do
- field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE.connection_type, resolver_class: described_class, null: false, max_page_size: 100)
+ field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: described_class, null: false, max_page_size: 100)
expect(field.to_graphql.complexity.call({}, { sort: 'foo' }, 1)).to eq 6
expect(field.to_graphql.complexity.call({}, { sort: 'foo', iid: 1 }, 1)).to eq 3
diff --git a/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb b/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb
index 8d15d7eda1b..852aaf66201 100644
--- a/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb
+++ b/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe ::CachingArrayResolver do
Class.new(::Resolvers::BaseResolver) do
include mod
type [::Types::UserType], null: true
- argument :is_admin, ::GraphQL::BOOLEAN_TYPE, required: false
+ argument :is_admin, ::GraphQL::Types::Boolean, required: false
def query_input(is_admin:)
is_admin
@@ -50,7 +50,7 @@ RSpec.describe ::CachingArrayResolver do
Class.new(::Resolvers::BaseResolver) do
include mod
type [::Types::UserType], null: true
- argument :username, ::GraphQL::STRING_TYPE, required: false
+ argument :username, ::GraphQL::Types::String, required: false
def query_input(username:)
username
diff --git a/spec/graphql/resolvers/concerns/resolves_ids_spec.rb b/spec/graphql/resolvers/concerns/resolves_ids_spec.rb
new file mode 100644
index 00000000000..1dd27c0eff0
--- /dev/null
+++ b/spec/graphql/resolvers/concerns/resolves_ids_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ResolvesIds do
+ # gid://gitlab/Project/6
+ # gid://gitlab/Issue/6
+ # gid://gitlab/Project/6 gid://gitlab/Issue/6
+ context 'with a single project' do
+ let(:ids) { 'gid://gitlab/Project/6' }
+ let(:type) { ::Types::GlobalIDType[::Project] }
+
+ it 'returns the correct array' do
+ expect(resolve_ids).to match_array(['6'])
+ end
+ end
+
+ context 'with a single issue' do
+ let(:ids) { 'gid://gitlab/Issue/9' }
+ let(:type) { ::Types::GlobalIDType[::Issue] }
+
+ it 'returns the correct array' do
+ expect(resolve_ids).to match_array(['9'])
+ end
+ end
+
+ context 'with multiple users' do
+ let(:ids) { ['gid://gitlab/User/7', 'gid://gitlab/User/13', 'gid://gitlab/User/21'] }
+ let(:type) { ::Types::GlobalIDType[::User] }
+
+ it 'returns the correct array' do
+ expect(resolve_ids).to match_array(%w[7 13 21])
+ end
+ end
+
+ def mock_resolver
+ Class.new(GraphQL::Schema::Resolver) { extend ResolvesIds }
+ end
+
+ def resolve_ids
+ mock_resolver.resolve_ids(ids, type)
+ end
+end
diff --git a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
index 3dffda75e08..6f6855c8f84 100644
--- a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
+++ b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe ResolvesPipelines do
end
it 'increases field complexity based on arguments' do
- field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, null: false, max_page_size: 1)
+ field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String, resolver_class: resolver, null: false, max_page_size: 1)
expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 2
expect(field.to_graphql.complexity.call({}, { sha: 'foo' }, 1)).to eq 4
diff --git a/spec/graphql/resolvers/echo_resolver_spec.rb b/spec/graphql/resolvers/echo_resolver_spec.rb
index 4f48e5e0d7a..59a121ac7de 100644
--- a/spec/graphql/resolvers/echo_resolver_spec.rb
+++ b/spec/graphql/resolvers/echo_resolver_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Resolvers::EchoResolver do
let(:text) { 'Message test' }
specify do
- expect(described_class).to have_non_null_graphql_type(::GraphQL::STRING_TYPE)
+ expect(described_class).to have_non_null_graphql_type(::GraphQL::Types::String)
end
describe '#resolve' do
diff --git a/spec/graphql/resolvers/error_tracking/sentry_detailed_error_resolver_spec.rb b/spec/graphql/resolvers/error_tracking/sentry_detailed_error_resolver_spec.rb
index bf8d2139c82..2aef483ac95 100644
--- a/spec/graphql/resolvers/error_tracking/sentry_detailed_error_resolver_spec.rb
+++ b/spec/graphql/resolvers/error_tracking/sentry_detailed_error_resolver_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe Resolvers::ErrorTracking::SentryDetailedErrorResolver do
end
context 'error matched' do
- let(:detailed_error) { build(:detailed_error_tracking_error) }
+ let(:detailed_error) { build(:error_tracking_sentry_detailed_error) }
before do
allow(issue_details_service).to receive(:execute)
diff --git a/spec/graphql/resolvers/groups_resolver_spec.rb b/spec/graphql/resolvers/groups_resolver_spec.rb
new file mode 100644
index 00000000000..e53ca674163
--- /dev/null
+++ b/spec/graphql/resolvers/groups_resolver_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::GroupsResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:group) { create(:group, name: 'public-group') }
+ let_it_be(:private_group) { create(:group, :private, name: 'private-group') }
+ let_it_be(:subgroup1) { create(:group, parent: group, name: 'Subgroup') }
+ let_it_be(:subgroup2) { create(:group, parent: subgroup1, name: 'Test Subgroup 2') }
+ let_it_be(:private_subgroup1) { create(:group, :private, parent: private_group, name: 'Subgroup1') }
+ let_it_be(:private_subgroup2) { create(:group, :private, parent: private_subgroup1, name: 'Subgroup2') }
+ let_it_be(:user) { create(:user) }
+
+ before_all do
+ private_group.add_developer(user)
+ end
+
+ shared_examples 'access to all public descendant groups' do
+ it 'returns all public descendant groups of the parent group ordered by ASC name' do
+ is_expected.to eq([subgroup1, subgroup2])
+ end
+ end
+
+ shared_examples 'access to all public subgroups' do
+ it 'returns all public subgroups of the parent group' do
+ is_expected.to contain_exactly(subgroup1)
+ end
+ end
+
+ shared_examples 'returning empty results' do
+ it 'returns empty results' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when parent group is public' do
+ subject { resolve(described_class, obj: group, args: params, ctx: { current_user: current_user }) }
+
+ context 'when `include_parent_descendants` is false' do
+ let(:params) { { include_parent_descendants: false } }
+
+ context 'when user is not logged in' do
+ let(:current_user) { nil }
+
+ it_behaves_like 'access to all public subgroups'
+ end
+
+ context 'when user is logged in' do
+ let(:current_user) { user }
+
+ it_behaves_like 'access to all public subgroups'
+ end
+ end
+
+ context 'when `include_parent_descendants` is true' do
+ let(:params) { { include_parent_descendants: true } }
+
+ context 'when user is not logged in' do
+ let(:current_user) { nil }
+
+ it_behaves_like 'access to all public descendant groups'
+ end
+
+ context 'when user is logged in' do
+ let(:current_user) { user }
+
+ it_behaves_like 'access to all public descendant groups'
+
+ context 'with owned argument set as true' do
+ before do
+ subgroup1.add_owner(current_user)
+ params[:owned] = true
+ end
+
+ it 'returns only descendant groups owned by the user' do
+ is_expected.to contain_exactly(subgroup1)
+ end
+ end
+
+ context 'with search argument' do
+ it 'returns only descendant groups with matching name or path' do
+ params[:search] = 'Test'
+ is_expected.to contain_exactly(subgroup2)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when parent group is private' do
+ subject { resolve(described_class, obj: private_group, args: params, ctx: { current_user: current_user }) }
+
+ context 'when `include_parent_descendants` is true' do
+ let(:params) { { include_parent_descendants: true } }
+
+ context 'when user is not logged in' do
+ let(:current_user) { nil }
+
+ it_behaves_like 'returning empty results'
+ end
+
+ context 'when user is logged in' do
+ let(:current_user) { user }
+
+ it 'returns all private descendant groups' do
+ is_expected.to contain_exactly(private_subgroup1, private_subgroup2)
+ end
+ end
+ end
+
+ context 'when `include_parent_descendants` is false' do
+ let(:params) { { include_parent_descendants: false } }
+
+ context 'when user is not logged in' do
+ let(:current_user) { nil }
+
+ it_behaves_like 'returning empty results'
+ end
+
+ context 'when user is logged in' do
+ let(:current_user) { user }
+
+ it 'returns private subgroups' do
+ is_expected.to contain_exactly(private_subgroup1)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index 9b329e961cc..6e187e57729 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -11,9 +11,9 @@ RSpec.describe Resolvers::IssuesResolver do
let_it_be(:project) { create(:project, group: group) }
let_it_be(:other_project) { create(:project, group: group) }
- let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:started_milestone) { create(:milestone, project: project, title: "started milestone", start_date: 1.day.ago) }
let_it_be(:assignee) { create(:user) }
- let_it_be(:issue1) { create(:incident, project: project, state: :opened, created_at: 3.hours.ago, updated_at: 3.hours.ago, milestone: milestone) }
+ let_it_be(:issue1) { create(:incident, project: project, state: :opened, created_at: 3.hours.ago, updated_at: 3.hours.ago, milestone: started_milestone) }
let_it_be(:issue2) { create(:issue, project: project, state: :closed, title: 'foo', created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at: 1.hour.ago, assignees: [assignee]) }
let_it_be(:issue3) { create(:issue, project: other_project, state: :closed, title: 'foo', created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at: 1.hour.ago, assignees: [assignee]) }
let_it_be(:issue4) { create(:issue) }
@@ -43,7 +43,63 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'filters by milestone' do
- expect(resolve_issues(milestone_title: [milestone.title])).to contain_exactly(issue1)
+ expect(resolve_issues(milestone_title: [started_milestone.title])).to contain_exactly(issue1)
+ end
+
+ describe 'filtering by milestone wildcard id' do
+ let_it_be(:upcoming_milestone) { create(:milestone, project: project, title: "upcoming milestone", start_date: 1.day.ago, due_date: 1.day.from_now) }
+ let_it_be(:past_milestone) { create(:milestone, project: project, title: "past milestone", due_date: 1.day.ago) }
+ let_it_be(:future_milestone) { create(:milestone, project: project, title: "future milestone", start_date: 1.day.from_now) }
+ let_it_be(:issue5) { create(:issue, project: project, state: :opened, milestone: upcoming_milestone) }
+ let_it_be(:issue6) { create(:issue, project: project, state: :opened, milestone: past_milestone) }
+ let_it_be(:issue7) { create(:issue, project: project, state: :opened, milestone: future_milestone) }
+
+ let(:wildcard_started) { 'STARTED' }
+ let(:wildcard_upcoming) { 'UPCOMING' }
+ let(:wildcard_any) { 'ANY' }
+ let(:wildcard_none) { 'NONE' }
+
+ it 'returns issues with started milestone' do
+ expect(resolve_issues(milestone_wildcard_id: wildcard_started)).to contain_exactly(issue1, issue5)
+ end
+
+ it 'returns issues with upcoming milestone' do
+ expect(resolve_issues(milestone_wildcard_id: wildcard_upcoming)).to contain_exactly(issue5)
+ end
+
+ it 'returns issues with any milestone' do
+ expect(resolve_issues(milestone_wildcard_id: wildcard_any)).to contain_exactly(issue1, issue5, issue6, issue7)
+ end
+
+ it 'returns issues with no milestone' do
+ expect(resolve_issues(milestone_wildcard_id: wildcard_none)).to contain_exactly(issue2)
+ end
+
+ it 'raises a mutually exclusive filter error when wildcard and title are provided' do
+ expect do
+ resolve_issues(milestone_title: ["started milestone"], milestone_wildcard_id: wildcard_started)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'only one of [milestoneTitle, milestoneWildcardId] arguments is allowed at the same time.')
+ end
+
+ context 'negated filtering' do
+ it 'returns issues matching the searched title after applying a negated filter' do
+ expect(resolve_issues(milestone_title: ['past milestone'], not: { milestone_wildcard_id: wildcard_upcoming })).to contain_exactly(issue6)
+ end
+
+ it 'returns issues excluding the ones with started milestone' do
+ expect(resolve_issues(not: { milestone_wildcard_id: wildcard_started })).to contain_exactly(issue7)
+ end
+
+ it 'returns issues excluding the ones with upcoming milestone' do
+ expect(resolve_issues(not: { milestone_wildcard_id: wildcard_upcoming })).to contain_exactly(issue6)
+ end
+
+ it 'raises a mutually exclusive filter error when wildcard and title are provided as negated filters' do
+ expect do
+ resolve_issues(not: { milestone_title: ["started milestone"], milestone_wildcard_id: wildcard_started })
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'only one of [milestoneTitle, milestoneWildcardId] arguments is allowed at the same time.')
+ end
+ end
end
it 'filters by two assignees' do
@@ -169,7 +225,7 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'returns issues without the specified milestone' do
- expect(resolve_issues(not: { milestone_title: [milestone.title] })).to contain_exactly(issue2)
+ expect(resolve_issues(not: { milestone_title: [started_milestone.title] })).to contain_exactly(issue2)
end
it 'returns issues without the specified assignee_usernames' do
@@ -337,13 +393,13 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'finds a specific issue with iid', :request_store do
- result = batch_sync(max_queries: 4) { resolve_issues(iid: issue1.iid).to_a }
+ result = batch_sync(max_queries: 5) { resolve_issues(iid: issue1.iid).to_a }
expect(result).to contain_exactly(issue1)
end
it 'batches queries that only include IIDs', :request_store do
- result = batch_sync(max_queries: 4) do
+ result = batch_sync(max_queries: 5) do
[issue1, issue2]
.map { |issue| resolve_issues(iid: issue.iid.to_s) }
.flat_map(&:to_a)
@@ -353,7 +409,7 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'finds a specific issue with iids', :request_store do
- result = batch_sync(max_queries: 4) do
+ result = batch_sync(max_queries: 5) do
resolve_issues(iids: [issue1.iid]).to_a
end
@@ -407,7 +463,7 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'increases field complexity based on arguments' do
- field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE.connection_type, resolver_class: described_class, null: false, max_page_size: 100)
+ field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: described_class, null: false, max_page_size: 100)
expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 4
expect(field.to_graphql.complexity.call({}, { labelName: 'foo' }, 1)).to eq 8
diff --git a/spec/graphql/resolvers/merge_requests_count_resolver_spec.rb b/spec/graphql/resolvers/merge_requests_count_resolver_spec.rb
new file mode 100644
index 00000000000..da177da93a6
--- /dev/null
+++ b/spec/graphql/resolvers/merge_requests_count_resolver_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::MergeRequestsCountResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project1) { create(:project, :repository, :public) }
+ let_it_be(:project2) { create(:project, :repository, repository_access_level: ProjectFeature::PRIVATE) }
+ let_it_be(:issue) { create(:issue, project: project1) }
+ let_it_be(:merge_request_closing_issue1) { create(:merge_requests_closing_issues, issue: issue) }
+ let_it_be(:merge_request_closing_issue2) do
+ merge_request = create(:merge_request, source_project: project2)
+ create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request)
+ end
+
+ specify do
+ expect(described_class).to have_nullable_graphql_type(GraphQL::Types::Int)
+ end
+
+ subject { batch_sync { resolve_merge_requests_count(issue) } }
+
+ context "when user can only view an issue's closing merge requests that are public" do
+ it 'returns the count of the merge requests closing the issue' do
+ expect(subject).to eq(1)
+ end
+ end
+
+ context "when user can view an issue's closing merge requests that are both public and private" do
+ before do
+ project2.add_reporter(user)
+ end
+
+ it 'returns the count of the merge requests closing the issue' do
+ expect(subject).to eq(2)
+ end
+ end
+ end
+
+ def resolve_merge_requests_count(obj)
+ resolve(described_class, obj: obj, ctx: { current_user: user })
+ end
+end
diff --git a/spec/graphql/resolvers/merge_requests_resolver_spec.rb b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
index aec6c6c6708..64ee0d4f9cc 100644
--- a/spec/graphql/resolvers/merge_requests_resolver_spec.rb
+++ b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
@@ -303,6 +303,29 @@ RSpec.describe Resolvers::MergeRequestsResolver do
expect { resolve_mr(project, sort: :merged_at_desc, labels: %w[a b]) }.not_to raise_error
end
end
+
+ context 'when sorting by closed at' do
+ before do
+ merge_request_1.metrics.update!(latest_closed_at: 10.days.ago)
+ merge_request_3.metrics.update!(latest_closed_at: 5.days.ago)
+ end
+
+ it 'sorts merge requests ascending' do
+ expect(resolve_mr(project, sort: :closed_at_asc))
+ .to match_array(mrs)
+ .and be_sorted(->(mr) { [closed_at(mr), -mr.id] })
+ end
+
+ it 'sorts merge requests descending' do
+ expect(resolve_mr(project, sort: :closed_at_desc))
+ .to match_array(mrs)
+ .and be_sorted(->(mr) { [-closed_at(mr), -mr.id] })
+ end
+
+ def closed_at(mr)
+ nils_last(mr.metrics.latest_closed_at)
+ end
+ end
end
end
end
diff --git a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
index 618d012bd6d..b1f50a4a4a5 100644
--- a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
@@ -145,7 +145,7 @@ RSpec.describe Resolvers::NamespaceProjectsResolver do
end
it 'has an high complexity regardless of arguments' do
- field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE.connection_type, resolver_class: described_class, null: false, max_page_size: 100)
+ field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: described_class, null: false, max_page_size: 100)
expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 24
expect(field.to_graphql.complexity.call({}, { include_subgroups: true }, 1)).to eq 24
diff --git a/spec/graphql/resolvers/paginated_tree_resolver_spec.rb b/spec/graphql/resolvers/paginated_tree_resolver_spec.rb
new file mode 100644
index 00000000000..82b05937aa3
--- /dev/null
+++ b/spec/graphql/resolvers/paginated_tree_resolver_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::PaginatedTreeResolver do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository }
+
+ specify do
+ expect(described_class).to have_nullable_graphql_type(Types::Tree::TreeType.connection_type)
+ end
+
+ describe '#resolve', :aggregate_failures do
+ subject { resolve_repository(args, opts) }
+
+ let(:args) { { ref: 'master' } }
+ let(:opts) { {} }
+
+ let(:start_cursor) { subject.start_cursor }
+ let(:end_cursor) { subject.end_cursor }
+ let(:items) { subject.items }
+ let(:entries) { items.first.entries }
+
+ it 'resolves to a collection with a tree object' do
+ expect(items.first).to be_an_instance_of(Tree)
+
+ expect(start_cursor).to be_nil
+ expect(end_cursor).to be_blank
+ expect(entries.count).to eq(repository.tree.entries.count)
+ end
+
+ context 'with recursive option' do
+ let(:args) { super().merge(recursive: true) }
+
+ it 'resolves to a recursive tree' do
+ expect(entries[4].path).to eq('files/html')
+ end
+ end
+
+ context 'with limited max_page_size' do
+ let(:opts) { { max_page_size: 5 } }
+
+ it 'resolves to a pagination collection with a tree object' do
+ expect(items.first).to be_an_instance_of(Tree)
+
+ expect(start_cursor).to be_nil
+ expect(end_cursor).to be_present
+ expect(entries.count).to eq(5)
+ end
+ end
+
+ context 'when repository does not exist' do
+ before do
+ allow(repository).to receive(:exists?).and_return(false)
+ end
+
+ it 'returns nil' do
+ is_expected.to be(nil)
+ end
+ end
+
+ describe 'Cursor pagination' do
+ context 'when cursor is invalid' do
+ let(:args) { super().merge(after: 'invalid') }
+
+ it { expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError) }
+ end
+
+ it 'returns all tree entries during cursor pagination' do
+ cursor = nil
+
+ expected_entries = repository.tree.entries.map(&:path)
+ collected_entries = []
+
+ loop do
+ result = resolve_repository(args.merge(after: cursor), max_page_size: 10)
+
+ collected_entries += result.items.first.entries.map(&:path)
+
+ expect(result.start_cursor).to eq(cursor)
+ cursor = result.end_cursor
+
+ break if cursor.blank?
+ end
+
+ expect(collected_entries).to match_array(expected_entries)
+ end
+ end
+ end
+
+ def resolve_repository(args, opts = {})
+ field_options = described_class.field_options.merge(
+ owner: resolver_parent,
+ name: 'field_value'
+ ).merge(opts)
+
+ field = ::Types::BaseField.new(**field_options)
+ resolve_field(field, repository, args: args, object_type: resolver_parent)
+ end
+end
diff --git a/spec/graphql/resolvers/project_resolver_spec.rb b/spec/graphql/resolvers/project_resolver_spec.rb
index 72a01b1c574..d0661c27b95 100644
--- a/spec/graphql/resolvers/project_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_resolver_spec.rb
@@ -28,8 +28,8 @@ RSpec.describe Resolvers::ProjectResolver do
end
it 'does not increase complexity depending on number of load limits' do
- field1 = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 100)
- field2 = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 1)
+ field1 = Types::BaseField.new(name: 'test', type: GraphQL::Types::String, resolver_class: described_class, null: false, max_page_size: 100)
+ field2 = Types::BaseField.new(name: 'test', type: GraphQL::Types::String, resolver_class: described_class, null: false, max_page_size: 1)
expect(field1.to_graphql.complexity.call({}, {}, 1)).to eq 2
expect(field2.to_graphql.complexity.call({}, {}, 1)).to eq 2
diff --git a/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb b/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
index 8c36153d485..75b9be7dfe7 100644
--- a/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
@@ -86,11 +86,11 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
context 'when Jira connection is not valid' do
before do
WebMock.stub_request(:get, 'https://jira.example.com/rest/api/2/project')
- .to_raise(JIRA::HTTPError.new(double(message: 'Some failure.')))
+ .to_raise(JIRA::HTTPError.new(double(message: '{"errorMessages":["Some failure"]}')))
end
it 'raises failure error' do
- expect { resolve_jira_projects }.to raise_error('Jira request error: Some failure.')
+ expect { resolve_jira_projects }.to raise_error('An error occurred while requesting data from Jira: Some failure. Check your Jira integration configuration and try again.')
end
end
end
diff --git a/spec/graphql/resolvers/terraform/states_resolver_spec.rb b/spec/graphql/resolvers/terraform/states_resolver_spec.rb
index 91d48cd782b..012c74ce398 100644
--- a/spec/graphql/resolvers/terraform/states_resolver_spec.rb
+++ b/spec/graphql/resolvers/terraform/states_resolver_spec.rb
@@ -43,7 +43,8 @@ RSpec.describe Resolvers::Terraform::StatesResolver.single do
it do
expect(subject).to be_present
- expect(subject.type.to_s).to eq('String!')
+ expect(subject.type).to be_kind_of GraphQL::Schema::NonNull
+ expect(subject.type.unwrap).to eq GraphQL::Types::String
expect(subject.description).to be_present
end
end
diff --git a/spec/graphql/resolvers/timelog_resolver_spec.rb b/spec/graphql/resolvers/timelog_resolver_spec.rb
index bb4938c751f..f45f528fe7e 100644
--- a/spec/graphql/resolvers/timelog_resolver_spec.rb
+++ b/spec/graphql/resolvers/timelog_resolver_spec.rb
@@ -5,115 +5,306 @@ require 'spec_helper'
RSpec.describe Resolvers::TimelogResolver do
include GraphqlHelpers
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :empty_repo, :public, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:error_class) { Gitlab::Graphql::Errors::ArgumentError }
+
specify do
expect(described_class).to have_non_null_graphql_type(::Types::TimelogType.connection_type)
end
- context "with a group" do
- let_it_be(:current_user) { create(:user) }
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, :empty_repo, :public, group: group) }
+ shared_examples_for 'with a project' do
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:timelog1) { create(:issue_timelog, issue: issue, spent_at: 2.days.ago.beginning_of_day) }
+ let_it_be(:timelog2) { create(:issue_timelog, issue: issue, spent_at: 2.days.ago.end_of_day) }
+ let_it_be(:timelog3) { create(:merge_request_timelog, merge_request: merge_request, spent_at: 10.days.ago) }
- describe '#resolve' do
- let_it_be(:short_time_ago) { 5.days.ago.beginning_of_day }
- let_it_be(:medium_time_ago) { 15.days.ago.beginning_of_day }
+ let(:args) { { start_time: 6.days.ago, end_time: 2.days.ago.noon } }
- let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ it 'finds all timelogs within given dates' do
+ timelogs = resolve_timelogs(**args)
- let_it_be(:timelog1) { create(:issue_timelog, issue: issue, spent_at: short_time_ago.beginning_of_day) }
- let_it_be(:timelog2) { create(:issue_timelog, issue: issue, spent_at: short_time_ago.end_of_day) }
- let_it_be(:timelog3) { create(:merge_request_timelog, merge_request: merge_request, spent_at: medium_time_ago) }
+ expect(timelogs).to contain_exactly(timelog1)
+ end
- let(:args) { { start_time: short_time_ago, end_time: short_time_ago.noon } }
+ context 'when no dates specified' do
+ let(:args) { {} }
it 'finds all timelogs' do
- timelogs = resolve_timelogs
+ timelogs = resolve_timelogs(**args)
expect(timelogs).to contain_exactly(timelog1, timelog2, timelog3)
end
+ end
- it 'finds all timelogs within given dates' do
+ context 'when only start_time present' do
+ let(:args) { { start_time: 2.days.ago.noon } }
+
+ it 'finds timelogs after the start_time' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog2)
+ end
+ end
+
+ context 'when only end_time present' do
+ let(:args) { { end_time: 2.days.ago.noon } }
+
+ it 'finds timelogs before the end_time' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog1, timelog3)
+ end
+ end
+
+ context 'when start_time and end_date are present' do
+ let(:args) { { start_time: 6.days.ago, end_date: 2.days.ago } }
+
+ it 'finds timelogs until the end of day of end_date' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog1, timelog2)
+ end
+ end
+
+ context 'when start_date and end_time are present' do
+ let(:args) { { start_date: 6.days.ago, end_time: 2.days.ago.noon } }
+
+ it 'finds all timelogs within start_date and end_time' do
timelogs = resolve_timelogs(**args)
expect(timelogs).to contain_exactly(timelog1)
end
+ end
- context 'when only start_date is present' do
- let(:args) { { start_date: short_time_ago } }
+ it 'returns nothing when user has insufficient permissions' do
+ project2 = create(:project, :empty_repo, :private)
+ issue2 = create(:issue, project: project2)
+ create(:issue_timelog, issue: issue2, spent_at: 2.days.ago.beginning_of_day)
- it 'finds timelogs until the end of day of end_date' do
- timelogs = resolve_timelogs(**args)
+ user = create(:user)
- expect(timelogs).to contain_exactly(timelog1, timelog2)
+ expect(resolve_timelogs(user: user, obj: project2, **args)).to be_empty
+ end
+
+ context 'when arguments are invalid' do
+ let_it_be(:error_class) { Gitlab::Graphql::Errors::ArgumentError }
+
+ context 'when start_time and start_date are present' do
+ let(:args) { { start_time: 6.days.ago, start_date: 6.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Provide either a start date or time, but not both/)
end
end
- context 'when only end_date is present' do
- let(:args) { { end_date: medium_time_ago } }
+ context 'when end_time and end_date are present' do
+ let(:args) { { end_time: 2.days.ago, end_date: 2.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Provide either an end date or time, but not both/)
+ end
+ end
- it 'finds timelogs until the end of day of end_date' do
- timelogs = resolve_timelogs(**args)
+ context 'when start argument is after end argument' do
+ let(:args) { { start_time: 2.days.ago, end_time: 6.days.ago } }
- expect(timelogs).to contain_exactly(timelog3)
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Start argument must be before End argument/)
end
end
+ end
+ end
- context 'when start_time and end_date are present' do
- let(:args) { { start_time: short_time_ago, end_date: short_time_ago } }
+ shared_examples "with a group" do
+ let_it_be(:short_time_ago) { 5.days.ago.beginning_of_day }
+ let_it_be(:medium_time_ago) { 15.days.ago.beginning_of_day }
- it 'finds timelogs until the end of day of end_date' do
- timelogs = resolve_timelogs(**args)
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
- expect(timelogs).to contain_exactly(timelog1, timelog2)
- end
+ let_it_be(:timelog1) { create(:issue_timelog, issue: issue, spent_at: short_time_ago.beginning_of_day) }
+ let_it_be(:timelog2) { create(:issue_timelog, issue: issue, spent_at: short_time_ago.end_of_day) }
+ let_it_be(:timelog3) { create(:merge_request_timelog, merge_request: merge_request, spent_at: medium_time_ago) }
+
+ let(:args) { { start_time: short_time_ago, end_time: short_time_ago.noon } }
+
+ it 'finds all timelogs' do
+ timelogs = resolve_timelogs
+
+ expect(timelogs).to contain_exactly(timelog1, timelog2, timelog3)
+ end
+
+ it 'finds all timelogs within given dates' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog1)
+ end
+
+ context 'when only start_date is present' do
+ let(:args) { { start_date: short_time_ago } }
+
+ it 'finds timelogs from the start of day of start_date' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog1, timelog2)
end
+ end
- context 'when start_date and end_time are present' do
- let(:args) { { start_date: short_time_ago, end_time: short_time_ago.noon } }
+ context 'when only end_date is present' do
+ let(:args) { { end_date: medium_time_ago } }
- it 'finds all timelogs within start_date and end_time' do
- timelogs = resolve_timelogs(**args)
+ it 'finds timelogs until the end of day of end_date' do
+ timelogs = resolve_timelogs(**args)
- expect(timelogs).to contain_exactly(timelog1)
- end
+ expect(timelogs).to contain_exactly(timelog3)
end
+ end
- context 'when arguments are invalid' do
- let_it_be(:error_class) { Gitlab::Graphql::Errors::ArgumentError }
+ context 'when start_time and end_date are present' do
+ let(:args) { { start_time: short_time_ago, end_date: short_time_ago } }
- context 'when start_time and start_date are present' do
- let(:args) { { start_time: short_time_ago, start_date: short_time_ago } }
+ it 'finds timelogs until the end of day of end_date' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog1, timelog2)
+ end
+ end
+
+ context 'when start_date and end_time are present' do
+ let(:args) { { start_date: short_time_ago, end_time: short_time_ago.noon } }
+
+ it 'finds all timelogs within start_date and end_time' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog1)
+ end
+ end
+
+ context 'when arguments are invalid' do
+ context 'when start_time and start_date are present' do
+ let(:args) { { start_time: short_time_ago, start_date: short_time_ago } }
- it 'returns correct error' do
- expect { resolve_timelogs(**args) }
- .to raise_error(error_class, /Provide either a start date or time, but not both/)
- end
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Provide either a start date or time, but not both/)
end
+ end
- context 'when end_time and end_date are present' do
- let(:args) { { end_time: short_time_ago, end_date: short_time_ago } }
+ context 'when end_time and end_date are present' do
+ let(:args) { { end_time: short_time_ago, end_date: short_time_ago } }
- it 'returns correct error' do
- expect { resolve_timelogs(**args) }
- .to raise_error(error_class, /Provide either an end date or time, but not both/)
- end
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Provide either an end date or time, but not both/)
end
+ end
- context 'when start argument is after end argument' do
- let(:args) { { start_time: short_time_ago, end_time: medium_time_ago } }
+ context 'when start argument is after end argument' do
+ let(:args) { { start_time: short_time_ago, end_time: medium_time_ago } }
- it 'returns correct error' do
- expect { resolve_timelogs(**args) }
- .to raise_error(error_class, /Start argument must be before End argument/)
- end
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Start argument must be before End argument/)
end
end
end
end
- def resolve_timelogs(user: current_user, **args)
+ shared_examples "with a user" do
+ let_it_be(:short_time_ago) { 5.days.ago.beginning_of_day }
+ let_it_be(:medium_time_ago) { 15.days.ago.beginning_of_day }
+
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let_it_be(:timelog1) { create(:issue_timelog, issue: issue, user: current_user) }
+ let_it_be(:timelog2) { create(:issue_timelog, issue: issue, user: create(:user)) }
+ let_it_be(:timelog3) { create(:merge_request_timelog, merge_request: merge_request, user: current_user) }
+
+ it 'finds timelogs belonging to the user' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog1, timelog3)
+ end
+ end
+
+ context "on a project" do
+ let(:object) { project }
+ let(:extra_args) { {} }
+
+ it_behaves_like 'with a project'
+ end
+
+ context "with a project filter" do
+ let(:object) { nil }
+ let(:extra_args) { { project_id: project.to_global_id } }
+
+ it_behaves_like 'with a project'
+ end
+
+ context 'on a group' do
+ let(:object) { group }
+ let(:extra_args) { {} }
+
+ it_behaves_like 'with a group'
+ end
+
+ context 'with a group filter' do
+ let(:object) { nil }
+ let(:extra_args) { { group_id: group.to_global_id } }
+
+ it_behaves_like 'with a group'
+ end
+
+ context 'on a user' do
+ let(:object) { current_user }
+ let(:extra_args) { {} }
+ let(:args) { {} }
+
+ it_behaves_like 'with a user'
+ end
+
+ context 'with a user filter' do
+ let(:object) { nil }
+ let(:extra_args) { { username: current_user.username } }
+ let(:args) { {} }
+
+ it_behaves_like 'with a user'
+ end
+
+ context 'when > `default_max_page_size` records' do
+ let(:object) { nil }
+ let!(:timelog_list) { create_list(:timelog, 101, issue: issue) }
+ let(:args) { { project_id: "gid://gitlab/Project/#{project.id}" } }
+ let(:extra_args) { {} }
+
+ it 'pagination returns `default_max_page_size` and sets `has_next_page` true' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs.items.count).to be(100)
+ expect(timelogs.has_next_page).to be(true)
+ end
+ end
+
+ context 'when no object or arguments provided' do
+ let(:object) { nil }
+ let(:args) { {} }
+ let(:extra_args) { {} }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Provide at least one argument/)
+ end
+ end
+
+ def resolve_timelogs(user: current_user, obj: object, **args)
context = { current_user: user }
- resolve(described_class, obj: group, args: args, ctx: context)
+ resolve(described_class, obj: obj, args: args.merge(extra_args), ctx: context)
end
end
diff --git a/spec/graphql/resolvers/user_discussions_count_resolver_spec.rb b/spec/graphql/resolvers/user_discussions_count_resolver_spec.rb
index cc855bbcb53..70f06b58a65 100644
--- a/spec/graphql/resolvers/user_discussions_count_resolver_spec.rb
+++ b/spec/graphql/resolvers/user_discussions_count_resolver_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Resolvers::UserDiscussionsCountResolver do
let_it_be(:private_discussion) { create_list(:discussion_note_on_issue, 3, noteable: private_issue, project: private_project) }
specify do
- expect(described_class).to have_nullable_graphql_type(GraphQL::INT_TYPE)
+ expect(described_class).to have_nullable_graphql_type(GraphQL::Types::Int)
end
context 'when counting discussions from a public issue' do
diff --git a/spec/graphql/resolvers/user_notes_count_resolver_spec.rb b/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
index 6cf23a2f57f..bc173b2a166 100644
--- a/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
+++ b/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Resolvers::UserNotesCountResolver do
let_it_be(:private_project) { create(:project, :repository, :private) }
specify do
- expect(described_class).to have_nullable_graphql_type(GraphQL::INT_TYPE)
+ expect(described_class).to have_nullable_graphql_type(GraphQL::Types::Int)
end
context 'when counting notes from an issue' do
diff --git a/spec/graphql/subscriptions/issuable_updated_spec.rb b/spec/graphql/subscriptions/issuable_updated_spec.rb
index cc88b37627d..c15b4f532ef 100644
--- a/spec/graphql/subscriptions/issuable_updated_spec.rb
+++ b/spec/graphql/subscriptions/issuable_updated_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Subscriptions::IssuableUpdated do
end
end
- context 'when a GraphQL::ID_TYPE is provided' do
+ context 'when a GraphQL::Types::ID is provided' do
let(:issuable_id) { issue.to_gid.to_s }
it 'raises an exception' do
diff --git a/spec/graphql/types/base_argument_spec.rb b/spec/graphql/types/base_argument_spec.rb
index 61e0179ff21..8f5f2e08799 100644
--- a/spec/graphql/types/base_argument_spec.rb
+++ b/spec/graphql/types/base_argument_spec.rb
@@ -3,15 +3,41 @@
require 'spec_helper'
RSpec.describe Types::BaseArgument do
- include_examples 'Gitlab-style deprecations' do
- let_it_be(:field) do
- Types::BaseField.new(name: 'field', type: String, null: true)
+ let_it_be(:field) do
+ Types::BaseField.new(name: 'field', type: String, null: true)
+ end
+
+ let(:base_args) { { name: 'test', type: String, required: false, owner: field } }
+
+ def subject(args = {})
+ described_class.new(**base_args.merge(args))
+ end
+
+ include_examples 'Gitlab-style deprecations'
+
+ describe 'required argument declarations' do
+ it 'accepts nullable, required arguments' do
+ arguments = base_args.merge({ required: :nullable })
+
+ expect { subject(arguments) }.not_to raise_error
+ end
+
+ it 'accepts required, non-nullable arguments' do
+ arguments = base_args.merge({ required: true })
+
+ expect { subject(arguments) }.not_to raise_error
+ end
+
+ it 'accepts non-required arguments' do
+ arguments = base_args.merge({ required: false })
+
+ expect { subject(arguments) }.not_to raise_error
end
- let(:base_args) { { name: 'test', type: String, required: false, owner: field } }
+ it 'accepts no required argument declaration' do
+ arguments = base_args
- def subject(args = {})
- described_class.new(**base_args.merge(args))
+ expect { subject(arguments) }.not_to raise_error
end
end
end
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index c34fbf42dd8..82efd618e38 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Types::BaseField do
end
it 'defaults to 1' do
- field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true)
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true)
expect(field.to_graphql.complexity).to eq 1
end
@@ -25,7 +25,7 @@ RSpec.describe Types::BaseField do
describe '#base_complexity' do
context 'with no gitaly calls' do
it 'defaults to 1' do
- field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true)
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true)
expect(field.base_complexity).to eq 1
end
@@ -33,7 +33,7 @@ RSpec.describe Types::BaseField do
context 'with a gitaly call' do
it 'adds 1 to the default value' do
- field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true, calls_gitaly: true)
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, calls_gitaly: true)
expect(field.base_complexity).to eq 2
end
@@ -41,14 +41,14 @@ RSpec.describe Types::BaseField do
end
it 'has specified value' do
- field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true, complexity: 12)
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, complexity: 12)
expect(field.to_graphql.complexity).to eq 12
end
context 'when field has a resolver' do
context 'when a valid complexity is already set' do
- let(:field) { described_class.new(name: 'test', type: GraphQL::STRING_TYPE.connection_type, resolver_class: resolver, complexity: 2, max_page_size: 100, null: true) }
+ let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: resolver, complexity: 2, max_page_size: 100, null: true) }
it 'uses this complexity' do
expect(field.to_graphql.complexity).to eq 2
@@ -56,7 +56,7 @@ RSpec.describe Types::BaseField do
end
context 'and is a connection' do
- let(:field) { described_class.new(name: 'test', type: GraphQL::STRING_TYPE.connection_type, resolver_class: resolver, max_page_size: 100, null: true) }
+ let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: resolver, max_page_size: 100, null: true) }
it 'sets complexity depending on arguments for resolvers' do
expect(field.to_graphql.complexity.call({}, {}, 2)).to eq 4
@@ -71,7 +71,7 @@ RSpec.describe Types::BaseField do
context 'and is not a connection' do
it 'sets complexity as normal' do
- field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, max_page_size: 100, null: true)
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, resolver_class: resolver, max_page_size: 100, null: true)
expect(field.to_graphql.complexity.call({}, {}, 2)).to eq 2
expect(field.to_graphql.complexity.call({}, { first: 50 }, 2)).to eq 2
@@ -82,8 +82,8 @@ RSpec.describe Types::BaseField do
context 'calls_gitaly' do
context 'for fields with a resolver' do
it 'adds 1 if true' do
- with_gitaly_field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, null: true, calls_gitaly: true)
- without_gitaly_field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, null: true)
+ with_gitaly_field = described_class.new(name: 'test', type: GraphQL::Types::String, resolver_class: resolver, null: true, calls_gitaly: true)
+ without_gitaly_field = described_class.new(name: 'test', type: GraphQL::Types::String, resolver_class: resolver, null: true)
base_result = without_gitaly_field.to_graphql.complexity.call({}, {}, 2)
expect(with_gitaly_field.to_graphql.complexity.call({}, {}, 2)).to eq base_result + 1
@@ -92,28 +92,28 @@ RSpec.describe Types::BaseField do
context 'for fields without a resolver' do
it 'adds 1 if true' do
- field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true, calls_gitaly: true)
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, calls_gitaly: true)
expect(field.to_graphql.complexity).to eq 2
end
end
it 'defaults to false' do
- field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true)
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true)
expect(field.base_complexity).to eq Types::BaseField::DEFAULT_COMPLEXITY
end
context 'with declared constant complexity value' do
it 'has complexity set to that constant' do
- field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true, complexity: 12)
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, complexity: 12)
expect(field.to_graphql.complexity).to eq 12
end
it 'does not raise an error even with Gitaly calls' do
allow(Gitlab::GitalyClient).to receive(:get_request_count).and_return([0, 1])
- field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true, complexity: 12)
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, complexity: 12)
expect(field.to_graphql.complexity).to eq 12
end
@@ -123,7 +123,7 @@ RSpec.describe Types::BaseField do
describe '#visible?' do
context 'and has a feature_flag' do
let(:flag) { :test_feature }
- let(:field) { described_class.new(name: 'test', type: GraphQL::STRING_TYPE, feature_flag: flag, null: false) }
+ let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String, feature_flag: flag, null: false) }
let(:context) { {} }
before do
@@ -156,7 +156,7 @@ RSpec.describe Types::BaseField do
describe '#description' do
context 'feature flag given' do
- let(:field) { described_class.new(name: 'test', type: GraphQL::STRING_TYPE, feature_flag: flag, null: false, description: 'Test description.') }
+ let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String, feature_flag: flag, null: false, description: 'Test description.') }
let(:flag) { :test_flag }
it 'prepends the description' do
@@ -211,7 +211,7 @@ RSpec.describe Types::BaseField do
include_examples 'Gitlab-style deprecations' do
def subject(args = {})
- base_args = { name: 'test', type: GraphQL::STRING_TYPE, null: true }
+ base_args = { name: 'test', type: GraphQL::Types::String, null: true }
described_class.new(**base_args.merge(args))
end
diff --git a/spec/graphql/types/global_id_type_spec.rb b/spec/graphql/types/global_id_type_spec.rb
index cdf09dd9cc9..4efa3018dad 100644
--- a/spec/graphql/types/global_id_type_spec.rb
+++ b/spec/graphql/types/global_id_type_spec.rb
@@ -255,7 +255,7 @@ RSpec.describe Types::GlobalIDType do
query(GraphQL.parse(gql_query), vars).result
end
- all_types = [::GraphQL::ID_TYPE, ::Types::GlobalIDType, ::Types::GlobalIDType[::Project]]
+ all_types = [::GraphQL::Types::ID, ::Types::GlobalIDType, ::Types::GlobalIDType[::Project]]
shared_examples 'a working query' do
# Simplified schema to test compatibility
@@ -284,7 +284,7 @@ RSpec.describe Types::GlobalIDType do
# This is needed so that all types are always registered as input types
field :echo, String, null: true do
- argument :id, ::GraphQL::ID_TYPE, required: false
+ argument :id, ::GraphQL::Types::ID, required: false
argument :gid, ::Types::GlobalIDType, required: false
argument :pid, ::Types::GlobalIDType[::Project], required: false
end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index ef11e3d309c..33250f8e6af 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe GitlabSchema.types['Group'] do
two_factor_grace_period auto_devops_enabled emails_disabled
mentions_disabled parent boards milestones group_members
merge_requests container_repositories container_repositories_count
- packages
+ packages shared_runners_setting timelogs
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -39,6 +39,15 @@ RSpec.describe GitlabSchema.types['Group'] do
it { is_expected.to have_graphql_resolver(Resolvers::GroupMembersResolver) }
end
+ describe 'timelogs field' do
+ subject { described_class.fields['timelogs'] }
+
+ it 'finds timelogs between start time and end time' do
+ is_expected.to have_graphql_resolver(Resolvers::TimelogResolver)
+ is_expected.to have_non_null_graphql_type(Types::TimelogType.connection_type)
+ end
+ end
+
it_behaves_like 'a GraphQL type with labels' do
let(:labels_resolver_arguments) { [:search_term, :includeAncestorGroups, :includeDescendantGroups, :onlyGroupLabels] }
end
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index a117741b3a2..b0aa11ee5ad 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
it 'has specific fields' do
fields = %i[id iid title description state reference author assignees updated_by participants labels milestone due_date
- confidential discussion_locked upvotes downvotes user_notes_count user_discussions_count web_path web_url relative_position
+ confidential discussion_locked upvotes downvotes merge_requests_count user_notes_count user_discussions_count web_path web_url relative_position
emails_disabled subscribed time_estimate total_time_spent human_time_estimate human_total_time_spent closed_at created_at updated_at task_completion_status
design_collection alert_management_alert severity current_user_todos moved moved_to
create_note_email timelogs project_id]
diff --git a/spec/graphql/types/merge_requests/reviewer_type_spec.rb b/spec/graphql/types/merge_requests/reviewer_type_spec.rb
index c2182e9968c..4ede8e5788f 100644
--- a/spec/graphql/types/merge_requests/reviewer_type_spec.rb
+++ b/spec/graphql/types/merge_requests/reviewer_type_spec.rb
@@ -31,6 +31,8 @@ RSpec.describe GitlabSchema.types['MergeRequestReviewer'] do
starredProjects
callouts
merge_request_interaction
+ namespace
+ timelogs
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/namespace_type_spec.rb b/spec/graphql/types/namespace_type_spec.rb
index 2ed1ee3e8c4..3b7f7e65e4b 100644
--- a/spec/graphql/types/namespace_type_spec.rb
+++ b/spec/graphql/types/namespace_type_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe GitlabSchema.types['Namespace'] do
it 'has the expected fields' do
expected_fields = %w[
id name path full_name full_path description description_html visibility
- lfs_enabled request_access_enabled projects root_storage_statistics
+ lfs_enabled request_access_enabled projects root_storage_statistics shared_runners_setting
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/packages/nuget/dependency_link_metdatum_type_spec.rb b/spec/graphql/types/packages/nuget/dependency_link_metdatum_type_spec.rb
new file mode 100644
index 00000000000..b11d9d131aa
--- /dev/null
+++ b/spec/graphql/types/packages/nuget/dependency_link_metdatum_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['NugetDependencyLinkMetadata'] do
+ it 'includes nuget dependency link metadatum fields' do
+ expected_fields = %w[
+ id target_framework
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/package_dependency_link_type_spec.rb b/spec/graphql/types/packages/package_dependency_link_type_spec.rb
new file mode 100644
index 00000000000..53ee8be69a6
--- /dev/null
+++ b/spec/graphql/types/packages/package_dependency_link_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageDependencyLink'] do
+ it 'includes package dependency link fields' do
+ expected_fields = %w[
+ id dependency_type dependency metadata
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/package_dependency_type_enum_spec.rb b/spec/graphql/types/packages/package_dependency_type_enum_spec.rb
new file mode 100644
index 00000000000..b8893a3619e
--- /dev/null
+++ b/spec/graphql/types/packages/package_dependency_type_enum_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageDependencyType'] do
+ it 'exposes all dependency type values' do
+ expect(described_class.values.keys).to contain_exactly(*%w[DEPENDENCIES DEV_DEPENDENCIES BUNDLE_DEPENDENCIES PEER_DEPENDENCIES])
+ end
+end
diff --git a/spec/graphql/types/packages/package_dependency_type_spec.rb b/spec/graphql/types/packages/package_dependency_type_spec.rb
new file mode 100644
index 00000000000..67474729781
--- /dev/null
+++ b/spec/graphql/types/packages/package_dependency_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageDependency'] do
+ it 'includes package dependency fields' do
+ expected_fields = %w[
+ id name version_pattern
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/package_details_type_spec.rb b/spec/graphql/types/packages/package_details_type_spec.rb
index 06093813315..7e1103d8aa0 100644
--- a/spec/graphql/types/packages/package_details_type_spec.rb
+++ b/spec/graphql/types/packages/package_details_type_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['PackageDetailsType'] do
it 'includes all the package fields' do
expected_fields = %w[
- id name version created_at updated_at package_type tags project pipelines versions package_files
+ id name version created_at updated_at package_type tags project pipelines versions package_files dependency_links
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/permission_types/base_permission_type_spec.rb b/spec/graphql/types/permission_types/base_permission_type_spec.rb
index 68632a509ee..e4726ad0e6e 100644
--- a/spec/graphql/types/permission_types/base_permission_type_spec.rb
+++ b/spec/graphql/types/permission_types/base_permission_type_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Types::PermissionTypes::BasePermissionType do
expected_keywords = {
name: :resolve_using_hash,
hash_key: :the_key,
- type: GraphQL::BOOLEAN_TYPE,
+ type: GraphQL::Types::Boolean,
description: "custom description",
null: false
}
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index a22110e8338..d825bd7ebd4 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe GitlabSchema.types['Project'] do
issue_status_counts terraform_states alert_management_integrations
container_repositories container_repositories_count
pipeline_analytics squash_read_only sast_ci_configuration
- ci_template
+ ci_template timelogs
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -392,6 +392,15 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_resolver(Resolvers::Terraform::StatesResolver) }
end
+ describe 'timelogs field' do
+ subject { described_class.fields['timelogs'] }
+
+ it 'finds timelogs for project' do
+ is_expected.to have_graphql_resolver(Resolvers::TimelogResolver)
+ is_expected.to have_graphql_type(Types::TimelogType.connection_type)
+ end
+ end
+
it_behaves_like 'a GraphQL type with labels' do
let(:labels_resolver_arguments) { [:search_term, :includeAncestorGroups] }
end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 9a8f2090cc1..6a43867f1fe 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe GitlabSchema.types['Query'] do
runner_platforms
runner
runners
+ timelogs
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
@@ -125,4 +126,14 @@ RSpec.describe GitlabSchema.types['Query'] do
it { is_expected.to have_graphql_type(Types::Packages::PackageDetailsType) }
end
+
+ describe 'timelogs field' do
+ subject { described_class.fields['timelogs'] }
+
+ it 'returns timelogs' do
+ is_expected.to have_graphql_arguments(:startDate, :endDate, :startTime, :endTime, :username, :projectId, :groupId, :after, :before, :first, :last)
+ is_expected.to have_graphql_type(Types::TimelogType.connection_type)
+ is_expected.to have_graphql_resolver(Resolvers::TimelogResolver)
+ end
+ end
end
diff --git a/spec/graphql/types/range_input_type_spec.rb b/spec/graphql/types/range_input_type_spec.rb
index aa6fd72cf13..ca27527c2b5 100644
--- a/spec/graphql/types/range_input_type_spec.rb
+++ b/spec/graphql/types/range_input_type_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe ::Types::RangeInputType do
- let(:of_integer) { ::GraphQL::INT_TYPE }
+ let(:of_integer) { ::GraphQL::Types::Int }
context 'parameterized on Integer' do
let(:type) { described_class[of_integer] }
@@ -32,12 +32,12 @@ RSpec.describe ::Types::RangeInputType do
expect(instance).to be_a_kind_of(described_class)
expect(instance).to be_a_kind_of(described_class[of_integer])
- expect(instance).not_to be_a_kind_of(described_class[GraphQL::ID_TYPE])
+ expect(instance).not_to be_a_kind_of(described_class[GraphQL::Types::ID])
end
it 'follows expected subtyping relationships for classes' do
expect(described_class[of_integer]).to be < described_class
- expect(described_class[of_integer]).not_to be < described_class[GraphQL::ID_TYPE]
+ expect(described_class[of_integer]).not_to be < described_class[GraphQL::Types::ID]
expect(described_class[of_integer]).not_to be < described_class[of_integer, false]
end
end
diff --git a/spec/graphql/types/repository_type_spec.rb b/spec/graphql/types/repository_type_spec.rb
index ee0cc4361da..5488d78b720 100644
--- a/spec/graphql/types/repository_type_spec.rb
+++ b/spec/graphql/types/repository_type_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe GitlabSchema.types['Repository'] do
specify { expect(described_class).to have_graphql_field(:tree) }
+ specify { expect(described_class).to have_graphql_field(:paginated_tree, calls_gitaly?: true, max_page_size: 100) }
+
specify { expect(described_class).to have_graphql_field(:exists, calls_gitaly?: true, complexity: 2) }
specify { expect(described_class).to have_graphql_field(:blobs) }
diff --git a/spec/graphql/types/timelog_type_spec.rb b/spec/graphql/types/timelog_type_spec.rb
index 1344af89fb6..dc1b1e2253e 100644
--- a/spec/graphql/types/timelog_type_spec.rb
+++ b/spec/graphql/types/timelog_type_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['Timelog'] do
- let(:fields) { %i[spent_at time_spent user issue merge_request note] }
+ let(:fields) { %i[spent_at time_spent user issue merge_request note summary] }
it { expect(described_class.graphql_name).to eq('Timelog') }
it { expect(described_class).to have_graphql_fields(fields) }
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index 7d73727b041..363ccdf88b7 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -36,6 +36,8 @@ RSpec.describe GitlabSchema.types['User'] do
projectMemberships
starredProjects
callouts
+ namespace
+ timelogs
]
expect(described_class).to have_graphql_fields(*expected_fields)
@@ -57,4 +59,13 @@ RSpec.describe GitlabSchema.types['User'] do
is_expected.to have_graphql_type(Types::UserCalloutType.connection_type)
end
end
+
+ describe 'timelogs field' do
+ subject { described_class.fields['timelogs'] }
+
+ it 'returns user timelogs' do
+ is_expected.to have_graphql_resolver(Resolvers::TimelogResolver)
+ is_expected.to have_graphql_type(Types::TimelogType.connection_type)
+ end
+ end
end
diff --git a/spec/helpers/admin/user_actions_helper_spec.rb b/spec/helpers/admin/user_actions_helper_spec.rb
index d945b13cad6..3bc380fbc99 100644
--- a/spec/helpers/admin/user_actions_helper_spec.rb
+++ b/spec/helpers/admin/user_actions_helper_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Admin::UserActionsHelper do
group.add_owner(user)
end
- it { is_expected.to contain_exactly("edit", "block", "ban", "deactivate") }
+ it { is_expected.to contain_exactly("edit", "block", "ban", "deactivate", "delete_with_contributions") }
end
context 'the user is a bot' do
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index bf533ca7034..7e3f665a99c 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -472,4 +472,23 @@ RSpec.describe ApplicationHelper do
allow(helper.controller).to receive(method_name).and_return(value)
end
end
+
+ describe '#gitlab_ui_form_for' do
+ let_it_be(:user) { build(:user) }
+
+ before do
+ allow(helper).to receive(:users_path).and_return('/root')
+ allow(helper).to receive(:form_for).and_call_original
+ end
+
+ it 'adds custom form builder to options and calls `form_for`' do
+ options = { html: { class: 'foo-bar' } }
+ expected_options = options.merge({ builder: ::Gitlab::FormBuilders::GitlabUiFormBuilder, url: '/root' })
+
+ expect do |b|
+ helper.gitlab_ui_form_for(user, options, &b)
+ end.to yield_with_args(::Gitlab::FormBuilders::GitlabUiFormBuilder)
+ expect(helper).to have_received(:form_for).with(user, expected_options)
+ end
+ end
end
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index 90bfb2e72e6..6d51d85fd64 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -238,7 +238,7 @@ RSpec.describe ApplicationSettingsHelper do
expect(helper.kroki_available_formats).to eq([
{
name: 'kroki_formats_blockdiag',
- label: 'BlockDiag (includes BlockDiag, SeqDiag, ActDiag, NwDiag, PacketDiag and RackDiag)',
+ label: 'BlockDiag (includes BlockDiag, SeqDiag, ActDiag, NwDiag, PacketDiag, and RackDiag)',
value: true
},
{
@@ -254,4 +254,34 @@ RSpec.describe ApplicationSettingsHelper do
])
end
end
+
+ describe '.pending_user_count' do
+ let(:user_cap) { 200 }
+
+ before do
+ stub_application_setting(new_user_signups_cap: user_cap)
+ end
+
+ subject(:pending_user_count) { helper.pending_user_count }
+
+ context 'when new_user_signups_cap is present' do
+ it 'returns the number of blocked pending users' do
+ create(:user, state: :blocked_pending_approval)
+
+ expect(pending_user_count).to eq 1
+ end
+ end
+
+ context 'when the new_user_signups_cap is not present' do
+ let(:user_cap) { nil }
+
+ it { is_expected.to eq 0 }
+
+ it 'does not query users unnecessarily' do
+ expect(User).not_to receive(:blocked_pending_approval)
+
+ pending_user_count
+ end
+ end
+ end
end
diff --git a/spec/helpers/button_helper_spec.rb b/spec/helpers/button_helper_spec.rb
index 09495bbde35..5601ab2df2a 100644
--- a/spec/helpers/button_helper_spec.rb
+++ b/spec/helpers/button_helper_spec.rb
@@ -174,7 +174,7 @@ RSpec.describe ButtonHelper do
expect(element.attr('itemprop')).to eq(nil)
expect(element.inner_text).to eq("")
- expect(element.to_html).to include sprite_icon('copy-to-clipboard')
+ expect(element.to_html).to include sprite_icon('copy-to-clipboard', css_class: 'gl-icon')
end
end
@@ -195,6 +195,10 @@ RSpec.describe ButtonHelper do
it 'shows copy to clipboard button with provided `button_text` as button label' do
expect(element(button_text: 'Copy text').inner_text).to eq('Copy text')
end
+
+ it 'adds `gl-button-icon` class to icon' do
+ expect(element(button_text: 'Copy text')).to have_css('svg.gl-button-icon')
+ end
end
context 'with `hide_tooltip` attribute provided' do
diff --git a/spec/helpers/ci/pipeline_editor_helper_spec.rb b/spec/helpers/ci/pipeline_editor_helper_spec.rb
index 3ce4657282e..3183a0a2394 100644
--- a/spec/helpers/ci/pipeline_editor_helper_spec.rb
+++ b/spec/helpers/ci/pipeline_editor_helper_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"empty-state-illustration-path" => 'foo',
"initial-branch-name" => nil,
"lint-help-page-path" => help_page_path('ci/lint', anchor: 'validate-basic-logic-and-syntax'),
- "needs-help-page-path" => help_page_path('ci/yaml/README', anchor: 'needs'),
+ "needs-help-page-path" => help_page_path('ci/yaml/index', anchor: 'needs'),
"new-merge-request-path" => '/mock/project/-/merge_requests/new',
"pipeline_etag" => graphql_etag_pipeline_sha_path(project.commit.sha),
"pipeline-page-path" => project_pipelines_path(project),
@@ -56,7 +56,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"project-namespace" => project.namespace.full_path,
"runner-help-page-path" => help_page_path('ci/runners/index'),
"total-branches" => project.repository.branches.length,
- "yml-help-page-path" => help_page_path('ci/yaml/README')
+ "yml-help-page-path" => help_page_path('ci/yaml/index')
})
end
end
@@ -74,7 +74,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"empty-state-illustration-path" => 'foo',
"initial-branch-name" => nil,
"lint-help-page-path" => help_page_path('ci/lint', anchor: 'validate-basic-logic-and-syntax'),
- "needs-help-page-path" => help_page_path('ci/yaml/README', anchor: 'needs'),
+ "needs-help-page-path" => help_page_path('ci/yaml/index', anchor: 'needs'),
"new-merge-request-path" => '/mock/project/-/merge_requests/new',
"pipeline_etag" => '',
"pipeline-page-path" => project_pipelines_path(project),
@@ -83,7 +83,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"project-namespace" => project.namespace.full_path,
"runner-help-page-path" => help_page_path('ci/runners/index'),
"total-branches" => 0,
- "yml-help-page-path" => help_page_path('ci/yaml/README')
+ "yml-help-page-path" => help_page_path('ci/yaml/index')
})
end
end
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index f64afa1ed71..f1e19f17c72 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -82,6 +82,10 @@ RSpec.describe ClustersHelper do
expect(subject[:get_started_docs_url]).to eq(help_page_path('user/clusters/agent/index', anchor: 'define-a-configuration-repository'))
expect(subject[:integration_docs_url]).to eq(help_page_path('user/clusters/agent/index', anchor: 'get-started-with-gitops-and-the-gitlab-agent'))
end
+
+ it 'displays kas address' do
+ expect(subject[:kas_address]).to eq(Gitlab::Kas.external_url)
+ end
end
describe '#js_clusters_list_data' do
diff --git a/spec/helpers/environment_helper_spec.rb b/spec/helpers/environment_helper_spec.rb
index 8c542ca01f4..0eecae32cc1 100644
--- a/spec/helpers/environment_helper_spec.rb
+++ b/spec/helpers/environment_helper_spec.rb
@@ -22,4 +22,41 @@ RSpec.describe EnvironmentHelper do
end
end
end
+
+ describe '#environments_detail_data_json' do
+ subject { helper.environments_detail_data_json(user, project, environment) }
+
+ let_it_be(:auto_stop_at) { Time.now.utc }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
+ let_it_be(:environment) { create(:environment, project: project, auto_stop_at: auto_stop_at) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:can?).and_return(true)
+ end
+
+ it 'returns the correct data' do
+ expect(subject).to eq({
+ name: environment.name,
+ id: environment.id,
+ external_url: environment.external_url,
+ can_update_environment: true,
+ can_destroy_environment: true,
+ can_read_environment: true,
+ can_stop_environment: true,
+ can_admin_environment: true,
+ environment_metrics_path: environment_metrics_path(environment),
+ environments_fetch_path: project_environments_path(project, format: :json),
+ environment_edit_path: edit_project_environment_path(project, environment),
+ environment_stop_path: stop_project_environment_path(project, environment),
+ environment_delete_path: environment_delete_path(environment),
+ environment_cancel_auto_stop_path: cancel_auto_stop_project_environment_path(project, environment),
+ environment_terminal_path: terminal_project_environment_path(project, environment),
+ has_terminals: false,
+ is_environment_available: true,
+ auto_stop_at: auto_stop_at
+ }.to_json)
+ end
+ end
end
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index 22867a5b652..60bed247d85 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -199,4 +199,13 @@ RSpec.describe EnvironmentsHelper do
expect(helper.environment_logs_data(project, environment)).to eq(expected_data)
end
end
+
+ describe '#environment_data' do
+ it 'returns the environment as JSON' do
+ expected_data = { id: environment.id,
+ name: environment.name,
+ external_url: environment.external_url }.to_json
+ expect(helper.environment_data(environment)).to eq(expected_data)
+ end
+ end
end
diff --git a/spec/helpers/groups/group_members_helper_spec.rb b/spec/helpers/groups/group_members_helper_spec.rb
index b409bebaac3..f5bc587bce3 100644
--- a/spec/helpers/groups/group_members_helper_spec.rb
+++ b/spec/helpers/groups/group_members_helper_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Groups::GroupMembersHelper do
let_it_be(:group) { create(:group) }
before do
+ allow(helper).to receive(:can?).with(current_user, :export_group_memberships, group).and_return(false)
allow(helper).to receive(:can?).with(current_user, :owner_access, group).and_return(true)
allow(helper).to receive(:current_user).and_return(current_user)
end
@@ -23,7 +24,7 @@ RSpec.describe Groups::GroupMembersHelper do
end
end
- describe '#group_members_app_data_json' do
+ describe '#group_members_app_data' do
include_context 'group_group_link'
let(:members) { create_list(:group_member, 2, group: shared_group, created_by: current_user) }
@@ -33,27 +34,26 @@ RSpec.describe Groups::GroupMembersHelper do
let(:members_collection) { members }
subject do
- Gitlab::Json.parse(
- helper.group_members_app_data_json(
- shared_group,
- members: present_members(members_collection),
- invited: present_members(invited),
- access_requests: present_members(access_requests)
- )
+ helper.group_members_app_data(
+ shared_group,
+ members: present_members(members_collection),
+ invited: present_members(invited),
+ access_requests: present_members(access_requests)
)
end
shared_examples 'members.json' do |member_type|
it 'returns `members` property that matches json schema' do
- expect(subject[member_type]['members'].to_json).to match_schema('members')
+ expect(subject[member_type.to_sym][:members].to_json).to match_schema('members')
end
it 'sets `member_path` property' do
- expect(subject[member_type]['member_path']).to eq('/groups/foo-bar/-/group_members/:id')
+ expect(subject[member_type.to_sym][:member_path]).to eq('/groups/foo-bar/-/group_members/:id')
end
end
before do
+ allow(helper).to receive(:can?).with(current_user, :export_group_memberships, shared_group).and_return(true)
allow(helper).to receive(:group_group_member_path).with(shared_group, ':id').and_return('/groups/foo-bar/-/group_members/:id')
allow(helper).to receive(:group_group_link_path).with(shared_group, ':id').and_return('/groups/foo-bar/-/group_links/:id')
allow(helper).to receive(:can?).with(current_user, :admin_group_member, shared_group).and_return(true)
@@ -63,7 +63,7 @@ RSpec.describe Groups::GroupMembersHelper do
expected = {
source_id: shared_group.id,
can_manage_members: true
- }.as_json
+ }
expect(subject).to include(expected)
end
@@ -90,11 +90,11 @@ RSpec.describe Groups::GroupMembersHelper do
context 'group links' do
it 'sets `group.members` property that matches json schema' do
- expect(subject['group']['members'].to_json).to match_schema('group_link/group_group_links')
+ expect(subject[:group][:members].to_json).to match_schema('group_link/group_group_links')
end
it 'sets `member_path` property' do
- expect(subject['group']['member_path']).to eq('/groups/foo-bar/-/group_links/:id')
+ expect(subject[:group][:member_path]).to eq('/groups/foo-bar/-/group_links/:id')
end
end
@@ -108,7 +108,7 @@ RSpec.describe Groups::GroupMembersHelper do
params: {}
}.as_json
- expect(subject['access_request']['pagination']).to include(expected)
+ expect(subject[:access_request][:pagination].as_json).to include(expected)
end
end
@@ -124,7 +124,7 @@ RSpec.describe Groups::GroupMembersHelper do
params: { invited_members_page: nil, search_invited: nil }
}.as_json
- expect(subject['user']['pagination']).to include(expected)
+ expect(subject[:user][:pagination].as_json).to include(expected)
end
end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index ad6852f63df..42da1cb71f1 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -19,11 +19,15 @@ RSpec.describe GroupsHelper do
end
end
- describe '#group_dependency_proxy_url' do
+ describe '#group_dependency_proxy_image_prefix' do
+ let_it_be(:group) { build_stubbed(:group, path: 'GroupWithUPPERcaseLetters') }
+
it 'converts uppercase letters to lowercase' do
- group = build_stubbed(:group, path: 'GroupWithUPPERcaseLetters')
+ expect(group_dependency_proxy_image_prefix(group)).to end_with("/groupwithuppercaseletters#{DependencyProxy::URL_SUFFIX}")
+ end
- expect(group_dependency_proxy_url(group)).to end_with("/groupwithuppercaseletters#{DependencyProxy::URL_SUFFIX}")
+ it 'removes the protocol' do
+ expect(group_dependency_proxy_image_prefix(group)).not_to include('http')
end
end
@@ -263,42 +267,6 @@ RSpec.describe GroupsHelper do
end
end
- describe '#group_container_registry_nav' do
- let_it_be(:group) { create(:group, :public) }
- let_it_be(:user) { create(:user) }
-
- before do
- stub_container_registry_config(enabled: true)
- allow(helper).to receive(:current_user) { user }
- allow(helper).to receive(:can?).with(user, :read_container_image, group) { true }
- helper.instance_variable_set(:@group, group)
- end
-
- subject { helper.group_container_registry_nav? }
-
- context 'when container registry is enabled' do
- it { is_expected.to be_truthy }
-
- it 'is disabled for guest' do
- allow(helper).to receive(:can?).with(user, :read_container_image, group) { false }
- expect(subject).to be false
- end
- end
-
- context 'when container registry is not enabled' do
- before do
- stub_container_registry_config(enabled: false)
- end
-
- it { is_expected.to be_falsy }
-
- it 'is disabled for guests' do
- allow(helper).to receive(:can?).with(user, :read_container_image, group) { false }
- expect(subject).to be false
- end
- end
- end
-
describe '#group_sidebar_links' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:user) { create(:user) }
@@ -313,15 +281,30 @@ RSpec.describe GroupsHelper do
it 'returns all the expected links' do
links = [
:overview, :activity, :issues, :labels, :milestones, :merge_requests,
- :group_members, :settings
+ :runners, :group_members, :settings
]
expect(helper.group_sidebar_links).to include(*links)
end
- it 'includes settings when the user can admin the group' do
+ it 'excludes runners when the user cannot admin the group' do
+ expect(helper).to receive(:current_user) { user }
+ # TODO Proper policies, such as `read_group_runners, should be implemented per
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/334802
+ expect(helper).to receive(:can?).twice.with(user, :admin_group, group) { false }
+
+ expect(helper.group_sidebar_links).not_to include(:runners)
+ end
+
+ it 'excludes runners when the feature "runner_list_group_view_vue_ui" is disabled' do
+ stub_feature_flags(runner_list_group_view_vue_ui: false)
+
+ expect(helper.group_sidebar_links).not_to include(:runners)
+ end
+
+ it 'excludes settings when the user can admin the group' do
expect(helper).to receive(:current_user) { user }
- expect(helper).to receive(:can?).with(user, :admin_group, group) { false }
+ expect(helper).to receive(:can?).twice.with(user, :admin_group, group) { false }
expect(helper.group_sidebar_links).not_to include(:settings)
end
@@ -540,22 +523,22 @@ RSpec.describe GroupsHelper do
end
end
- describe '#cached_issuables_count' do
- let_it_be(:current_user) { create(:user) }
- let_it_be(:group) { create(:group, name: 'group') }
-
- context 'with issues type' do
- let(:type) { :issues }
- let(:count_service) { Groups::OpenIssuesCountService }
+ describe '#can_admin_group_member?' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
- it_behaves_like 'cached issuables count'
+ before do
+ allow(helper).to receive(:current_user) { user }
end
- context 'with merge requests type' do
- let(:type) { :merge_requests }
- let(:count_service) { Groups::MergeRequestsCountService }
+ it 'returns true when current_user can admin members' do
+ group.add_owner(user)
+
+ expect(helper.can_admin_group_member?(group)).to be(true)
+ end
- it_behaves_like 'cached issuables count'
+ it 'returns false when current_user can not admin members' do
+ expect(helper.can_admin_group_member?(group)).to be(false)
end
end
end
diff --git a/spec/helpers/invite_members_helper_spec.rb b/spec/helpers/invite_members_helper_spec.rb
index 3d2adaa5b5d..e0e05140d6c 100644
--- a/spec/helpers/invite_members_helper_spec.rb
+++ b/spec/helpers/invite_members_helper_spec.rb
@@ -14,102 +14,92 @@ RSpec.describe InviteMembersHelper do
helper.extend(Gitlab::Experimentation::ControllerConcern)
end
- context 'with project' do
- before do
- allow(helper).to receive(:current_user) { owner }
- assign(:project, project)
- end
-
- describe "#can_invite_members_for_project?" do
- context 'when the user can_manage_project_members' do
+ describe '#common_invite_modal_dataset' do
+ context 'when member_areas_of_focus is enabled', :experiment do
+ context 'with control experience' do
before do
- allow(helper).to receive(:can_manage_project_members?).and_return(true)
+ stub_experiments(member_areas_of_focus: :control)
end
- it 'returns true' do
- expect(helper.can_invite_members_for_project?(project)).to eq true
- expect(helper).to have_received(:can_manage_project_members?)
- end
+ it 'has expected attributes' do
+ attributes = {
+ areas_of_focus_options: [],
+ no_selection_areas_of_focus: []
+ }
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it 'returns false' do
- expect(helper.can_invite_members_for_project?(project)).to eq false
- expect(helper).not_to have_received(:can_manage_project_members?)
- end
+ expect(helper.common_invite_modal_dataset(project)).to include(attributes)
end
end
- context 'when the user can not manage project members' do
+ context 'with candidate experience' do
before do
- expect(helper).to receive(:can_manage_project_members?).and_return(false)
+ stub_experiments(member_areas_of_focus: :candidate)
end
- it 'returns false' do
- expect(helper.can_invite_members_for_project?(project)).to eq false
+ it 'has expected attributes', :aggregate_failures do
+ output = helper.common_invite_modal_dataset(project)
+
+ expect(output[:no_selection_areas_of_focus]).to eq ['no_selection']
+ expect(Gitlab::Json.parse(output[:areas_of_focus_options]).first['value']).to eq 'Contribute to the codebase'
end
end
end
- describe "#directly_invite_members?" do
- context 'when the user is an owner' do
- before do
- allow(helper).to receive(:current_user) { owner }
- end
-
- it 'returns true' do
- expect(helper.directly_invite_members?).to eq true
- end
+ context 'when member_areas_of_focus is disabled' do
+ before do
+ stub_feature_flags(member_areas_of_focus: false)
end
- context 'when the user is a developer' do
- before do
- allow(helper).to receive(:current_user) { developer }
- end
+ it 'has expected attributes' do
+ attributes = {
+ id: project.id,
+ name: project.name,
+ default_access_level: Gitlab::Access::GUEST,
+ areas_of_focus_options: [],
+ no_selection_areas_of_focus: []
+ }
- it 'returns false' do
- expect(helper.directly_invite_members?).to eq false
- end
+ expect(helper.common_invite_modal_dataset(project)).to match(attributes)
end
end
end
- context 'with group' do
- let_it_be(:group) { create(:group) }
+ context 'with project' do
+ before do
+ allow(helper).to receive(:current_user) { owner }
+ assign(:project, project)
+ end
- describe "#invite_group_members?" do
- context 'when the user is an owner' do
+ describe "#can_invite_members_for_project?" do
+ context 'when the user can_admin_project_member' do
before do
- group.add_owner(owner)
- allow(helper).to receive(:current_user) { owner }
+ allow(helper).to receive(:can?).with(owner, :admin_project_member, project).and_return(true)
end
- it 'returns false' do
- allow(helper).to receive(:experiment_enabled?).with(:invite_members_empty_group_version_a) { false }
-
- expect(helper.invite_group_members?(group)).to eq false
+ it 'returns true', :aggregate_failures do
+ expect(helper.can_invite_members_for_project?(project)).to eq true
+ expect(helper).to have_received(:can?).with(owner, :admin_project_member, project)
end
- it 'returns true' do
- allow(helper).to receive(:experiment_enabled?).with(:invite_members_empty_group_version_a) { true }
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(invite_members_group_modal: false)
+ end
- expect(helper.invite_group_members?(group)).to eq true
+ it 'returns false', :aggregate_failures do
+ expect(helper.can_invite_members_for_project?(project)).to eq false
+ expect(helper).not_to have_received(:can?).with(owner, :admin_project_member, project)
+ end
end
end
- context 'when the user is a developer' do
+ context 'when the user can not manage project members' do
before do
- group.add_developer(developer)
- allow(helper).to receive(:current_user) { developer }
+ expect(helper).to receive(:can?).with(owner, :admin_project_member, project).and_return(false)
end
it 'returns false' do
- allow(helper).to receive(:experiment_enabled?).with(:invite_members_empty_group_version_a) { true }
-
- expect(helper.invite_group_members?(group)).to eq false
+ expect(helper.can_invite_members_for_project?(project)).to eq false
end
end
end
diff --git a/spec/helpers/issuables_description_templates_helper_spec.rb b/spec/helpers/issuables_description_templates_helper_spec.rb
index 95460174266..638dd201fc8 100644
--- a/spec/helpers/issuables_description_templates_helper_spec.rb
+++ b/spec/helpers/issuables_description_templates_helper_spec.rb
@@ -41,19 +41,6 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do
context 'when project parent group does not have a file template project' do
it_behaves_like 'project issuable templates'
end
-
- context 'when project parent group has a file template project' do
- let_it_be(:file_template_project) { create(:project, :custom_repo, group: parent_group, files: issuable_template_files) }
- let_it_be(:group, reload: true) { create(:group, parent: parent_group) }
- let_it_be(:project, reload: true) { create(:project, :custom_repo, group: group, files: issuable_template_files) }
-
- before do
- project.update!(group: group)
- parent_group.update_columns(file_template_project_id: file_template_project.id)
- end
-
- it_behaves_like 'project issuable templates'
- end
end
end
@@ -65,16 +52,12 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do
allow(helper).to receive(:issuable_templates).and_return(templates)
end
- context 'with matching project templates' do
+ context 'with project templates' do
let(:templates) do
{
"" => [
- { name: "another_issue_template", id: "another_issue_template", project_id: project.id },
- { name: "custom_issue_template", id: "custom_issue_template", project_id: project.id }
- ],
- "Instance" => [
- { name: "first_issue_issue_template", id: "first_issue_issue_template", project_id: non_existing_record_id },
- { name: "second_instance_issue_template", id: "second_instance_issue_template", project_id: non_existing_record_id }
+ { name: "another_issue_template", id: "another_issue_template" },
+ { name: "custom_issue_template", id: "custom_issue_template" }
]
}
end
@@ -90,10 +73,6 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do
"Project Templates" => [
{ name: "another_issue_template", id: "another_issue_template", project_id: non_existing_record_id },
{ name: "custom_issue_template", id: "custom_issue_template", project_id: non_existing_record_id }
- ],
- "Instance" => [
- { name: "first_issue_issue_template", id: "first_issue_issue_template", project_id: non_existing_record_id },
- { name: "second_instance_issue_template", id: "second_instance_issue_template", project_id: non_existing_record_id }
]
}
end
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 96aba312ba3..9cf3808ab72 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -1,14 +1,26 @@
# frozen_string_literal: true
-require "spec_helper"
+require 'spec_helper'
RSpec.describe IssuesHelper do
let(:project) { create(:project) }
let(:issue) { create :issue, project: project }
let(:ext_project) { create :redmine_project }
+ describe '#work_item_type_icon' do
+ it 'returns icon of all standard base types' do
+ WorkItem::Type.base_types.each do |type|
+ expect(work_item_type_icon(type[0])).to eq "issue-type-#{type[0].to_s.dasherize}"
+ end
+ end
+
+ it 'defaults to issue icon if type is unknown' do
+ expect(work_item_type_icon('invalid')).to eq 'issue-type-issue'
+ end
+ end
+
describe '#award_user_list' do
- it "returns a comma-separated list of the first X users" do
+ it 'returns a comma-separated list of the first X users' do
user = build_stubbed(:user, name: 'Joe')
awards = Array.new(3, build_stubbed(:award_emoji, user: user))
@@ -24,7 +36,7 @@ RSpec.describe IssuesHelper do
expect(award_user_list([award], nil)).to eq 'Joe'
end
- it "truncates lists" do
+ it 'truncates lists' do
user = build_stubbed(:user, name: 'Jane')
awards = Array.new(5, build_stubbed(:award_emoji, user: user))
@@ -32,14 +44,14 @@ RSpec.describe IssuesHelper do
.to eq('Jane, Jane, Jane, and 2 more.')
end
- it "displays the current user in front of other users" do
+ it 'displays the current user in front of other users' do
current_user = build_stubbed(:user)
my_award = build_stubbed(:award_emoji, user: current_user)
award = build_stubbed(:award_emoji, user: build_stubbed(:user, name: 'Jane'))
awards = Array.new(5, award).push(my_award)
expect(award_user_list(awards, current_user, limit: 2))
- .to eq("You, Jane, and 4 more.")
+ .to eq('You, Jane, and 4 more.')
end
end
@@ -54,19 +66,19 @@ RSpec.describe IssuesHelper do
end
end
- it "returns disabled string for unauthenticated user" do
- expect(helper.award_state_class(awardable, AwardEmoji.all, nil)).to eq("disabled")
+ it 'returns disabled string for unauthenticated user' do
+ expect(helper.award_state_class(awardable, AwardEmoji.all, nil)).to eq('disabled')
end
- it "returns disabled for a user that does not have access to the awardable" do
- expect(helper.award_state_class(awardable, AwardEmoji.all, build(:user))).to eq("disabled")
+ it 'returns disabled for a user that does not have access to the awardable' do
+ expect(helper.award_state_class(awardable, AwardEmoji.all, build(:user))).to eq('disabled')
end
- it "returns active string for author" do
- expect(helper.award_state_class(awardable, AwardEmoji.all, upvote.user)).to eq("active")
+ it 'returns active string for author' do
+ expect(helper.award_state_class(awardable, AwardEmoji.all, upvote.user)).to eq('active')
end
- it "is blank for a user that has access to the awardable" do
+ it 'is blank for a user that has access to the awardable' do
user = build(:user)
expect(helper).to receive(:can?).with(user, :award_emoji, awardable).and_return(true)
@@ -74,40 +86,40 @@ RSpec.describe IssuesHelper do
end
end
- describe "awards_sort" do
- it "sorts a hash so thumbsup and thumbsdown are always on top" do
- data = { "thumbsdown" => "some value", "lifter" => "some value", "thumbsup" => "some value" }
+ describe 'awards_sort' do
+ it 'sorts a hash so thumbsup and thumbsdown are always on top' do
+ data = { 'thumbsdown' => 'some value', 'lifter' => 'some value', 'thumbsup' => 'some value' }
expect(awards_sort(data).keys).to eq(%w(thumbsup thumbsdown lifter))
end
end
- describe "#link_to_discussions_to_resolve" do
- describe "passing only a merge request" do
+ describe '#link_to_discussions_to_resolve' do
+ describe 'passing only a merge request' do
let(:merge_request) { create(:merge_request) }
- it "links just the merge request" do
+ it 'links just the merge request' do
expected_path = project_merge_request_path(merge_request.project, merge_request)
expect(link_to_discussions_to_resolve(merge_request, nil)).to include(expected_path)
end
- it "contains the reference to the merge request" do
+ it 'contains the reference to the merge request' do
expect(link_to_discussions_to_resolve(merge_request, nil)).to include(merge_request.to_reference)
end
end
- describe "when passing a discussion" do
+ describe 'when passing a discussion' do
let(:diff_note) { create(:diff_note_on_merge_request) }
let(:merge_request) { diff_note.noteable }
let(:discussion) { diff_note.to_discussion }
- it "links to the merge request with first note if a single discussion was passed" do
+ it 'links to the merge request with first note if a single discussion was passed' do
expected_path = Gitlab::UrlBuilder.build(diff_note)
expect(link_to_discussions_to_resolve(merge_request, discussion)).to include(expected_path)
end
- it "contains both the reference to the merge request and a mention of the discussion" do
+ it 'contains both the reference to the merge request and a mention of the discussion' do
expect(link_to_discussions_to_resolve(merge_request, discussion)).to include("#{merge_request.to_reference} (discussion #{diff_note.id})")
end
end
@@ -235,13 +247,13 @@ RSpec.describe IssuesHelper do
end
describe '#use_startup_call' do
- it "returns false when a query param is present" do
+ it 'returns false when a query param is present' do
allow(controller.request).to receive(:query_parameters).and_return({ foo: 'bar' })
expect(helper.use_startup_call?).to eq(false)
end
- it "returns false when user has stored sort preference" do
+ it 'returns false when user has stored sort preference' do
controller.instance_variable_set(:@sort, 'updated_asc')
expect(helper.use_startup_call?).to eq(false)
@@ -265,13 +277,13 @@ RSpec.describe IssuesHelper do
it 'returns expected result' do
expected = {
- can_create_issue: "true",
- can_reopen_issue: "true",
- can_report_spam: "false",
- can_update_issue: "true",
+ can_create_issue: 'true',
+ can_reopen_issue: 'true',
+ can_report_spam: 'false',
+ can_update_issue: 'true',
iid: issue.iid,
- is_issue_author: "false",
- issue_type: "issue",
+ is_issue_author: 'false',
+ issue_type: 'issue',
new_issue_path: new_project_issue_path(project),
project_path: project.full_path,
report_abuse_path: new_abuse_report_path(user_id: issue.author.id, ref_url: issue_url(issue)),
@@ -307,7 +319,7 @@ RSpec.describe IssuesHelper do
initial_email: project.new_issuable_address(current_user, 'issue'),
is_signed_in: current_user.present?.to_s,
issues_path: project_issues_path(project),
- jira_integration_path: help_page_url('integration/jira/', anchor: 'view-jira-issues'),
+ jira_integration_path: help_page_url('integration/jira/issues', anchor: 'view-jira-issues'),
markdown_help_path: help_page_path('user/markdown'),
max_attachment_size: number_to_human_size(Gitlab::CurrentSettings.max_attachment_size.megabytes),
new_issue_path: new_project_issue_path(project, issue: { milestone_id: finder.milestones.first.id }),
@@ -345,7 +357,7 @@ RSpec.describe IssuesHelper do
end
it 'returns manual ordering class' do
- expect(helper.issue_manual_ordering_class).to eq("manual-ordering")
+ expect(helper.issue_manual_ordering_class).to eq('manual-ordering')
end
context 'when manual sorting disabled' do
diff --git a/spec/helpers/nav/new_dropdown_helper_spec.rb b/spec/helpers/nav/new_dropdown_helper_spec.rb
index e3d9bc5b174..03b9c538225 100644
--- a/spec/helpers/nav/new_dropdown_helper_spec.rb
+++ b/spec/helpers/nav/new_dropdown_helper_spec.rb
@@ -221,13 +221,13 @@ RSpec.describe Nav::NewDropdownHelper do
let(:with_show_new_issue_link) { false }
let(:with_merge_project) { nil }
let(:with_can_create_snippet_in_project) { false }
- let(:with_can_import_members) { false }
+ let(:with_can_admin_project_member) { false }
before do
allow(helper).to receive(:show_new_issue_link?).with(project) { with_show_new_issue_link }
allow(helper).to receive(:merge_request_source_project_for_project).with(project) { with_merge_project }
allow(helper).to receive(:can?).with(user, :create_snippet, project) { with_can_create_snippet_in_project }
- allow(helper).to receive(:can_import_members?) { with_can_import_members }
+ allow(helper).to receive(:can_admin_project_member?) { with_can_admin_project_member }
end
it 'has no menu sections' do
@@ -290,7 +290,7 @@ RSpec.describe Nav::NewDropdownHelper do
context 'when invite members experiment' do
let(:with_invite_members_experiment) { true }
- let(:with_can_import_members) { true }
+ let(:with_can_admin_project_member) { true }
let(:expected_title) { 'This project' }
let(:expected_href) { "/#{project.path_with_namespace}/-/project_members" }
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index 4c5f440b8a3..f0ad2038347 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -112,16 +112,6 @@ RSpec.describe NavHelper do
it { is_expected.to all(be_a(String)) }
end
- describe '.group_issues_sub_menu_items' do
- subject { helper.group_issues_sub_menu_items }
-
- before do
- allow(helper).to receive(:current_user).and_return(nil)
- end
-
- it { is_expected.to all(be_a(String)) }
- end
-
describe '#page_has_markdown?' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/helpers/packages_helper_spec.rb b/spec/helpers/packages_helper_spec.rb
index 8b3c8411fbd..bc60c582ff8 100644
--- a/spec/helpers/packages_helper_spec.rb
+++ b/spec/helpers/packages_helper_spec.rb
@@ -219,4 +219,25 @@ RSpec.describe PackagesHelper do
it { is_expected.to eq(expected_result) }
end
end
+
+ describe '#package_details_data' do
+ let_it_be(:package) { create(:package) }
+
+ before do
+ allow(helper).to receive(:current_user) { project.owner }
+ allow(helper).to receive(:can?) { true }
+ end
+
+ it 'when use_presenter is true populate the package key' do
+ result = helper.package_details_data(project, package, true)
+
+ expect(result[:package]).not_to be_nil
+ end
+
+ it 'when use_presenter is false the package key is nil' do
+ result = helper.package_details_data(project, package, false)
+
+ expect(result[:package]).to be_nil
+ end
+ end
end
diff --git a/spec/helpers/projects/project_members_helper_spec.rb b/spec/helpers/projects/project_members_helper_spec.rb
index b180b5ec06f..4e3a0147509 100644
--- a/spec/helpers/projects/project_members_helper_spec.rb
+++ b/spec/helpers/projects/project_members_helper_spec.rb
@@ -8,142 +8,8 @@ RSpec.describe Projects::ProjectMembersHelper do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- let(:allow_admin_project) { nil }
-
before do
allow(helper).to receive(:current_user).and_return(current_user)
- allow(helper).to receive(:can?).with(current_user, :admin_project_member, project).and_return(allow_admin_project)
- end
-
- shared_examples 'when `current_user` does not have `admin_project_member` permissions' do
- let(:allow_admin_project) { false }
-
- it { is_expected.to be(false) }
- end
-
- describe '#can_manage_project_members?' do
- subject { helper.can_manage_project_members?(project) }
-
- context 'when `current_user` has `admin_project_member` permissions' do
- let(:allow_admin_project) { true }
-
- it { is_expected.to be(true) }
- end
-
- include_examples 'when `current_user` does not have `admin_project_member` permissions'
- end
-
- describe '#show_groups?' do
- subject { helper.show_groups?(project.project_group_links) }
-
- context 'when group links exist' do
- let!(:project_group_link) { create(:project_group_link, project: project) }
-
- it { is_expected.to be(true) }
- end
-
- context 'when `search_groups` param is set' do
- before do
- allow(helper).to receive(:params).and_return({ search_groups: 'foo' })
- end
-
- it { is_expected.to be(true) }
- end
-
- context 'when `search_groups` param is not set and group links do not exist' do
- it { is_expected.to be(false) }
- end
- end
-
- describe '#show_invited_members?' do
- subject { helper.show_invited_members?(project, project.project_members.invite) }
-
- context 'when `current_user` has `admin_project_member` permissions' do
- let(:allow_admin_project) { true }
-
- context 'when invited members exist' do
- let!(:invite) { create(:project_member, :invited, project: project) }
-
- it { is_expected.to be(true) }
- end
-
- context 'when invited members do not exist' do
- it { is_expected.to be(false) }
- end
- end
-
- include_examples 'when `current_user` does not have `admin_project_member` permissions'
- end
-
- describe '#show_access_requests?' do
- subject { helper.show_access_requests?(project, project.requesters) }
-
- context 'when `current_user` has `admin_project_member` permissions' do
- let(:allow_admin_project) { true }
-
- context 'when access requests exist' do
- let!(:access_request) { create(:project_member, :access_request, project: project) }
-
- it { is_expected.to be(true) }
- end
-
- context 'when access requests do not exist' do
- it { is_expected.to be(false) }
- end
- end
-
- include_examples 'when `current_user` does not have `admin_project_member` permissions'
- end
-
- describe '#groups_tab_active?' do
- subject { helper.groups_tab_active? }
-
- context 'when `search_groups` param is set' do
- before do
- allow(helper).to receive(:params).and_return({ search_groups: 'foo' })
- end
-
- it { is_expected.to be(true) }
- end
-
- context 'when `search_groups` param is not set' do
- it { is_expected.to be(false) }
- end
- end
-
- describe '#current_user_is_group_owner?' do
- let(:group) { create(:group) }
-
- subject { helper.current_user_is_group_owner?(project2) }
-
- describe "when current user is the owner of the project's parent group" do
- let(:project2) { create(:project, namespace: group) }
-
- before do
- group.add_owner(current_user)
- end
-
- it { is_expected.to be(true) }
- end
-
- describe "when current user is not the owner of the project's parent group" do
- let_it_be(:user) { create(:user) }
-
- let(:project2) { create(:project, namespace: group) }
-
- before do
- group.add_owner(user)
- end
-
- it { is_expected.to be(false) }
- end
-
- describe "when project does not have a parent group" do
- let(:user) { create(:user) }
- let(:project2) { create(:project, namespace: user.namespace) }
-
- it { is_expected.to be(false) }
- end
end
describe 'project members' do
@@ -155,8 +21,6 @@ RSpec.describe Projects::ProjectMembersHelper do
let(:members_collection) { members }
describe '#project_members_app_data_json' do
- let(:allow_admin_project) { true }
-
subject do
Gitlab::Json.parse(
helper.project_members_app_data_json(
@@ -171,6 +35,7 @@ RSpec.describe Projects::ProjectMembersHelper do
before do
allow(helper).to receive(:project_project_member_path).with(project, ':id').and_return('/foo-bar/-/project_members/:id')
+ project.add_maintainer(current_user)
end
it 'returns expected json' do
diff --git a/spec/helpers/projects/terraform_helper_spec.rb b/spec/helpers/projects/terraform_helper_spec.rb
index 8833e23c47d..9c2f009be26 100644
--- a/spec/helpers/projects/terraform_helper_spec.rb
+++ b/spec/helpers/projects/terraform_helper_spec.rb
@@ -22,6 +22,18 @@ RSpec.describe Projects::TerraformHelper do
expect(subject[:project_path]).to eq(project.full_path)
end
+ it 'includes access token path' do
+ expect(subject[:access_tokens_path]).to eq(profile_personal_access_tokens_path)
+ end
+
+ it 'includes username' do
+ expect(subject[:username]).to eq(current_user.username)
+ end
+
+ it 'includes terraform state api url' do
+ expect(subject[:terraform_api_url]).to eq("#{Settings.gitlab.url}/api/v4/projects/#{project.id}/terraform/state")
+ end
+
it 'indicates the user is a terraform admin' do
expect(subject[:terraform_admin]).to eq(true)
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 75e80f5edbc..4dac4403f70 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -720,21 +720,21 @@ RSpec.describe ProjectsHelper do
end
end
- describe '#can_import_members?' do
+ describe '#can_admin_project_member?' do
context 'when user is project owner' do
before do
allow(helper).to receive(:current_user) { project.owner }
end
it 'returns true for owner of project' do
- expect(helper.can_import_members?).to eq true
+ expect(helper.can_admin_project_member?(project)).to eq true
end
end
context 'when user is not a project owner' do
using RSpec::Parameterized::TableSyntax
- where(:user_project_role, :can_import) do
+ where(:user_project_role, :can_admin) do
:maintainer | true
:developer | false
:reporter | false
@@ -748,7 +748,7 @@ RSpec.describe ProjectsHelper do
end
it 'resolves if the user can import members' do
- expect(helper.can_import_members?).to eq can_import
+ expect(helper.can_admin_project_member?(project)).to eq can_admin
end
end
end
@@ -918,33 +918,39 @@ RSpec.describe ProjectsHelper do
end
end
- describe '#project_permissions_settings' do
- context 'with no project_setting associated' do
- it 'includes a value for edit commit messages' do
- settings = project_permissions_settings(project)
+ describe '#project_permissions_panel_data' do
+ subject { helper.project_permissions_panel_data(project) }
- expect(settings[:allowEditingCommitMessages]).to be_falsy
- end
- end
-
- context 'when commits are allowed to be edited' do
- it 'includes the edit commit message value' do
- project.create_project_setting(allow_editing_commit_messages: true)
-
- settings = project_permissions_settings(project)
-
- expect(settings[:allowEditingCommitMessages]).to be_truthy
- end
+ before do
+ allow(helper).to receive(:can?) { true }
+ allow(helper).to receive(:current_user).and_return(user)
end
- context 'when commits are not allowed to be edited' do
- it 'returns false to the edit commit message value' do
- project.create_project_setting(allow_editing_commit_messages: false)
-
- settings = project_permissions_settings(project)
-
- expect(settings[:allowEditingCommitMessages]).to be_falsy
- end
+ it 'includes project_permissions_settings' do
+ settings = subject.dig(:currentSettings)
+
+ expect(settings).to include(
+ packagesEnabled: !!project.packages_enabled,
+ visibilityLevel: project.visibility_level,
+ requestAccessEnabled: !!project.request_access_enabled,
+ issuesAccessLevel: project.project_feature.issues_access_level,
+ repositoryAccessLevel: project.project_feature.repository_access_level,
+ forkingAccessLevel: project.project_feature.forking_access_level,
+ mergeRequestsAccessLevel: project.project_feature.merge_requests_access_level,
+ buildsAccessLevel: project.project_feature.builds_access_level,
+ wikiAccessLevel: project.project_feature.wiki_access_level,
+ snippetsAccessLevel: project.project_feature.snippets_access_level,
+ pagesAccessLevel: project.project_feature.pages_access_level,
+ analyticsAccessLevel: project.project_feature.analytics_access_level,
+ containerRegistryEnabled: !!project.container_registry_enabled,
+ lfsEnabled: !!project.lfs_enabled,
+ emailsDisabled: project.emails_disabled?,
+ metricsDashboardAccessLevel: project.project_feature.metrics_dashboard_access_level,
+ operationsAccessLevel: project.project_feature.operations_access_level,
+ showDefaultAwardEmojis: project.show_default_award_emojis?,
+ securityAndComplianceAccessLevel: project.security_and_compliance_access_level,
+ containerRegistryAccessLevel: project.project_feature.container_registry_access_level
+ )
end
end
end
diff --git a/spec/helpers/snippets_helper_spec.rb b/spec/helpers/snippets_helper_spec.rb
index 35882c9337b..12d791d8710 100644
--- a/spec/helpers/snippets_helper_spec.rb
+++ b/spec/helpers/snippets_helper_spec.rb
@@ -92,4 +92,23 @@ RSpec.describe SnippetsHelper do
end
end
end
+
+ describe '#snippet_report_abuse_path' do
+ let(:snippet) { public_personal_snippet }
+ let(:current_user) { create(:user) }
+
+ subject { snippet_report_abuse_path(snippet) }
+
+ it 'returns false if the user cannot submit the snippet as spam' do
+ allow(snippet).to receive(:submittable_as_spam_by?).and_return(false)
+
+ expect(subject).to be_falsey
+ end
+
+ it 'returns true if the user can submit the snippet as spam' do
+ allow(snippet).to receive(:submittable_as_spam_by?).and_return(true)
+
+ expect(subject).to be_truthy
+ end
+ end
end
diff --git a/spec/helpers/time_zone_helper_spec.rb b/spec/helpers/time_zone_helper_spec.rb
index 391e9bd38ed..e6cb20b5800 100644
--- a/spec/helpers/time_zone_helper_spec.rb
+++ b/spec/helpers/time_zone_helper_spec.rb
@@ -68,4 +68,24 @@ RSpec.describe TimeZoneHelper, :aggregate_failures do
end
end
end
+
+ describe '#local_time' do
+ let_it_be(:timezone) { 'America/Los_Angeles' }
+
+ before do
+ travel_to Time.find_zone(timezone).local(2021, 7, 20, 15, 30, 45)
+ end
+
+ context 'when a valid timezone is passed' do
+ it 'returns local time' do
+ expect(helper.local_time(timezone)).to eq('3:30 PM')
+ end
+ end
+
+ context 'when an invalid timezone is passed' do
+ it 'returns local time using the configured default timezone (UTC in this case)' do
+ expect(helper.local_time('Foo/Bar')).to eq('10:30 PM')
+ end
+ end
+ end
end
diff --git a/spec/helpers/user_callouts_helper_spec.rb b/spec/helpers/user_callouts_helper_spec.rb
index 90333cb0ad5..5ef1e9d4daf 100644
--- a/spec/helpers/user_callouts_helper_spec.rb
+++ b/spec/helpers/user_callouts_helper_spec.rb
@@ -61,34 +61,6 @@ RSpec.describe UserCalloutsHelper do
end
end
- describe '.show_service_templates_deprecated_callout?' do
- using RSpec::Parameterized::TableSyntax
-
- let_it_be(:admin) { create(:user, :admin) }
- let_it_be(:non_admin) { create(:user) }
-
- subject { helper.show_service_templates_deprecated_callout? }
-
- where(:self_managed, :is_admin_user, :has_active_service_template, :callout_dismissed, :should_show_callout) do
- true | true | true | false | true
- true | true | true | true | false
- true | false | true | false | false
- false | true | true | false | false
- true | true | false | false | false
- end
-
- with_them do
- before do
- allow(::Gitlab).to receive(:com?).and_return(!self_managed)
- allow(helper).to receive(:current_user).and_return(is_admin_user ? admin : non_admin)
- allow(helper).to receive(:user_dismissed?).with(described_class::SERVICE_TEMPLATES_DEPRECATED_CALLOUT) { callout_dismissed }
- create(:service, :template, type: 'MattermostService', active: has_active_service_template)
- end
-
- it { is_expected.to be should_show_callout }
- end
- end
-
describe '.show_customize_homepage_banner?' do
subject { helper.show_customize_homepage_banner? }
diff --git a/spec/initializers/00_rails_disable_joins_spec.rb b/spec/initializers/00_rails_disable_joins_spec.rb
new file mode 100644
index 00000000000..78e78b6810b
--- /dev/null
+++ b/spec/initializers/00_rails_disable_joins_spec.rb
@@ -0,0 +1,288 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'DisableJoins' do
+ let(:primary_model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = '_test_primary_records'
+
+ def self.name
+ 'TestPrimary'
+ end
+ end
+ end
+
+ let(:bridge_model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = '_test_bridge_records'
+
+ def self.name
+ 'TestBridge'
+ end
+ end
+ end
+
+ let(:secondary_model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = '_test_secondary_records'
+
+ def self.name
+ 'TestSecondary'
+ end
+ end
+ end
+
+ context 'passing disable_joins as an association option' do
+ context 'when the association is a bare has_one' do
+ it 'disallows the disable_joins option' do
+ expect do
+ primary_model.has_one :test_bridge, disable_joins: true
+ end.to raise_error(ArgumentError, /Unknown key: :disable_joins/)
+ end
+ end
+
+ context 'when the association is a belongs_to' do
+ it 'disallows the disable_joins option' do
+ expect do
+ bridge_model.belongs_to :test_secondary, disable_joins: true
+ end.to raise_error(ArgumentError, /Unknown key: :disable_joins/)
+ end
+ end
+
+ context 'when the association is has_one :through' do
+ it 'allows the disable_joins option' do
+ primary_model.has_one :test_bridge
+ bridge_model.belongs_to :test_secondary
+
+ expect do
+ primary_model.has_one :test_secondary, through: :test_bridge, disable_joins: true
+ end.not_to raise_error
+ end
+ end
+
+ context 'when the association is a bare has_many' do
+ it 'disallows the disable_joins option' do
+ expect do
+ primary_model.has_many :test_bridges, disable_joins: true
+ end.to raise_error(ArgumentError, /Unknown key: :disable_joins/)
+ end
+ end
+
+ context 'when the association is a has_many :through' do
+ it 'allows the disable_joins option' do
+ primary_model.has_many :test_bridges
+ bridge_model.belongs_to :test_secondary
+
+ expect do
+ primary_model.has_many :test_secondaries, through: :test_bridges, disable_joins: true
+ end.not_to raise_error
+ end
+ end
+ end
+
+ context 'querying has_one :through when disable_joins is set' do
+ before do
+ create_tables(<<~SQL)
+ CREATE TABLE _test_primary_records (
+ id serial NOT NULL PRIMARY KEY);
+
+ CREATE TABLE _test_bridge_records (
+ id serial NOT NULL PRIMARY KEY,
+ primary_record_id int NOT NULL,
+ secondary_record_id int NOT NULL);
+
+ CREATE TABLE _test_secondary_records (
+ id serial NOT NULL PRIMARY KEY);
+ SQL
+
+ primary_model.has_one :test_bridge, anonymous_class: bridge_model, foreign_key: :primary_record_id
+ bridge_model.belongs_to :test_secondary, anonymous_class: secondary_model, foreign_key: :secondary_record_id
+ primary_model.has_one :test_secondary, through: :test_bridge, anonymous_class: secondary_model,
+ disable_joins: -> { joins_disabled_flag }
+
+ primary_record = primary_model.create!
+ secondary_record = secondary_model.create!
+ bridge_model.create!(primary_record_id: primary_record.id, secondary_record_id: secondary_record.id)
+ end
+
+ context 'when disable_joins evaluates to true' do
+ let(:joins_disabled_flag) { true }
+
+ it 'executes separate queries' do
+ primary_record = primary_model.first
+
+ query_count = ActiveRecord::QueryRecorder.new { primary_record.test_secondary }.count
+
+ expect(query_count).to eq(2)
+ end
+ end
+
+ context 'when disable_joins evaluates to false' do
+ let(:joins_disabled_flag) { false }
+
+ it 'executes a single query' do
+ primary_record = primary_model.first
+
+ query_count = ActiveRecord::QueryRecorder.new { primary_record.test_secondary }.count
+
+ expect(query_count).to eq(1)
+ end
+ end
+ end
+
+ context 'querying has_many :through when disable_joins is set' do
+ before do
+ create_tables(<<~SQL)
+ CREATE TABLE _test_primary_records (
+ id serial NOT NULL PRIMARY KEY);
+
+ CREATE TABLE _test_bridge_records (
+ id serial NOT NULL PRIMARY KEY,
+ primary_record_id int NOT NULL);
+
+ CREATE TABLE _test_secondary_records (
+ id serial NOT NULL PRIMARY KEY,
+ bridge_record_id int NOT NULL);
+ SQL
+
+ primary_model.has_many :test_bridges, anonymous_class: bridge_model, foreign_key: :primary_record_id
+ bridge_model.has_many :test_secondaries, anonymous_class: secondary_model, foreign_key: :bridge_record_id
+ primary_model.has_many :test_secondaries, through: :test_bridges, anonymous_class: secondary_model,
+ disable_joins: -> { disabled_join_flag }
+
+ primary_record = primary_model.create!
+ bridge_record = bridge_model.create!(primary_record_id: primary_record.id)
+ secondary_model.create!(bridge_record_id: bridge_record.id)
+ end
+
+ context 'when disable_joins evaluates to true' do
+ let(:disabled_join_flag) { true }
+
+ it 'executes separate queries' do
+ primary_record = primary_model.first
+
+ query_count = ActiveRecord::QueryRecorder.new { primary_record.test_secondaries.first }.count
+
+ expect(query_count).to eq(2)
+ end
+ end
+
+ context 'when disable_joins evaluates to false' do
+ let(:disabled_join_flag) { false }
+
+ it 'executes a single query' do
+ primary_record = primary_model.first
+
+ query_count = ActiveRecord::QueryRecorder.new { primary_record.test_secondaries.first }.count
+
+ expect(query_count).to eq(1)
+ end
+ end
+ end
+
+ context 'querying STI relationships' do
+ let(:child_bridge_model) do
+ Class.new(bridge_model) do
+ def self.name
+ 'ChildBridge'
+ end
+ end
+ end
+
+ let(:child_secondary_model) do
+ Class.new(secondary_model) do
+ def self.name
+ 'ChildSecondary'
+ end
+ end
+ end
+
+ before do
+ create_tables(<<~SQL)
+ CREATE TABLE _test_primary_records (
+ id serial NOT NULL PRIMARY KEY);
+
+ CREATE TABLE _test_bridge_records (
+ id serial NOT NULL PRIMARY KEY,
+ primary_record_id int NOT NULL,
+ type text);
+
+ CREATE TABLE _test_secondary_records (
+ id serial NOT NULL PRIMARY KEY,
+ bridge_record_id int NOT NULL,
+ type text);
+ SQL
+
+ primary_model.has_many :child_bridges, anonymous_class: child_bridge_model, foreign_key: :primary_record_id
+ child_bridge_model.has_one :child_secondary, anonymous_class: child_secondary_model, foreign_key: :bridge_record_id
+ primary_model.has_many :child_secondaries, through: :child_bridges, anonymous_class: child_secondary_model, disable_joins: true
+
+ primary_record = primary_model.create!
+ parent_bridge_record = bridge_model.create!(primary_record_id: primary_record.id)
+ child_bridge_record = child_bridge_model.create!(primary_record_id: primary_record.id)
+
+ secondary_model.create!(bridge_record_id: child_bridge_record.id)
+ child_secondary_model.create!(bridge_record_id: parent_bridge_record.id)
+ child_secondary_model.create!(bridge_record_id: child_bridge_record.id)
+ end
+
+ it 'filters correctly by the STI type across multiple queries' do
+ primary_record = primary_model.first
+
+ query_recorder = ActiveRecord::QueryRecorder.new do
+ expect(primary_record.child_secondaries.count).to eq(1)
+ end
+
+ expect(query_recorder.count).to eq(2)
+ end
+ end
+
+ context 'querying polymorphic relationships' do
+ before do
+ create_tables(<<~SQL)
+ CREATE TABLE _test_primary_records (
+ id serial NOT NULL PRIMARY KEY);
+
+ CREATE TABLE _test_bridge_records (
+ id serial NOT NULL PRIMARY KEY,
+ primaryable_id int NOT NULL,
+ primaryable_type text NOT NULL);
+
+ CREATE TABLE _test_secondary_records (
+ id serial NOT NULL PRIMARY KEY,
+ bridgeable_id int NOT NULL,
+ bridgeable_type text NOT NULL);
+ SQL
+
+ primary_model.has_many :test_bridges, anonymous_class: bridge_model, foreign_key: :primaryable_id, as: :primaryable
+ bridge_model.has_one :test_secondaries, anonymous_class: secondary_model, foreign_key: :bridgeable_id, as: :bridgeable
+ primary_model.has_many :test_secondaries, through: :test_bridges, anonymous_class: secondary_model, disable_joins: true
+
+ primary_record = primary_model.create!
+ primary_bridge_record = bridge_model.create!(primaryable_id: primary_record.id, primaryable_type: 'TestPrimary')
+ nonprimary_bridge_record = bridge_model.create!(primaryable_id: primary_record.id, primaryable_type: 'NonPrimary')
+
+ secondary_model.create!(bridgeable_id: primary_bridge_record.id, bridgeable_type: 'TestBridge')
+ secondary_model.create!(bridgeable_id: nonprimary_bridge_record.id, bridgeable_type: 'TestBridge')
+ secondary_model.create!(bridgeable_id: primary_bridge_record.id, bridgeable_type: 'NonBridge')
+ end
+
+ it 'filters correctly by the polymorphic type across multiple queries' do
+ primary_record = primary_model.first
+
+ query_recorder = ActiveRecord::QueryRecorder.new do
+ expect(primary_record.test_secondaries.count).to eq(1)
+ end
+
+ expect(query_recorder.count).to eq(2)
+ end
+ end
+
+ def create_tables(table_sql)
+ ApplicationRecord.connection.execute(table_sql)
+
+ bridge_model.reset_column_information
+ secondary_model.reset_column_information
+ end
+end
diff --git a/spec/initializers/0_log_deprecations_spec.rb b/spec/initializers/0_log_deprecations_spec.rb
new file mode 100644
index 00000000000..35bceb2f132
--- /dev/null
+++ b/spec/initializers/0_log_deprecations_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe '0_log_deprecations' do
+ def load_initializer
+ load Rails.root.join('config/initializers/0_log_deprecations.rb')
+ end
+
+ let(:env_var) { '1' }
+
+ before do
+ stub_env('GITLAB_LOG_DEPRECATIONS', env_var)
+ load_initializer
+ end
+
+ after do
+ # reset state changed by initializer
+ Warning.clear
+ ActiveSupport::Notifications.unsubscribe('deprecation.rails')
+ end
+
+ context 'for Ruby deprecations' do
+ context 'when catching deprecations through Kernel#warn' do
+ it 'also logs them to deprecation logger' do
+ expect(Gitlab::DeprecationJsonLogger).to receive(:info).with(
+ message: 'ABC gem is deprecated',
+ source: 'ruby'
+ )
+
+ expect { warn('ABC gem is deprecated') }.to output.to_stderr
+ end
+ end
+
+ context 'for other messages from Kernel#warn' do
+ it 'does not log them to deprecation logger' do
+ expect(Gitlab::DeprecationJsonLogger).not_to receive(:info)
+
+ expect { warn('Sure is hot today') }.to output.to_stderr
+ end
+ end
+
+ context 'when disabled via environment' do
+ let(:env_var) { '0' }
+
+ it 'does not log them to deprecation logger' do
+ expect(Gitlab::DeprecationJsonLogger).not_to receive(:info)
+
+ expect { warn('ABC gem is deprecated') }.to output.to_stderr
+ end
+ end
+ end
+
+ context 'for Rails deprecations' do
+ it 'logs them to deprecation logger' do
+ expect(Gitlab::DeprecationJsonLogger).to receive(:info).with(
+ message: match(/^DEPRECATION WARNING: ABC will be removed/),
+ source: 'rails'
+ )
+
+ expect { ActiveSupport::Deprecation.warn('ABC will be removed') }.to output.to_stderr
+ end
+
+ context 'when disabled via environment' do
+ let(:env_var) { '0' }
+
+ it 'does not log them to deprecation logger' do
+ expect(Gitlab::DeprecationJsonLogger).not_to receive(:info)
+
+ expect { ActiveSupport::Deprecation.warn('ABC will be removed') }.to output.to_stderr
+ end
+ end
+ end
+end
diff --git a/spec/initializers/database_config_spec.rb b/spec/initializers/database_config_spec.rb
index f1b353d4012..5ddfbd64c23 100644
--- a/spec/initializers/database_config_spec.rb
+++ b/spec/initializers/database_config_spec.rb
@@ -21,37 +21,31 @@ RSpec.describe 'Database config initializer' do
let(:max_threads) { 8 }
- context "no existing pool size is set" do
- before do
- stub_database_config(pool_size: nil)
- end
+ it 'retains the correct database name for the connection' do
+ previous_db_name = Gitlab::Database.main.scope.connection.pool.db_config.name
- it "sets it based on the max number of worker threads" do
- expect { subject }.to change { Gitlab::Database.config['pool'] }.from(nil).to(18)
+ subject
- expect(ActiveRecord::Base.connection_db_config.pool).to eq(18)
- end
+ expect(Gitlab::Database.main.scope.connection.pool.db_config.name).to eq(previous_db_name)
end
- context "the existing pool size is smaller than the max number of worker threads" do
- before do
- stub_database_config(pool_size: 1)
- end
+ context 'when no custom headroom is specified' do
+ it 'sets the pool size based on the number of worker threads' do
+ old = ActiveRecord::Base.connection_db_config.pool
- it "sets it based on the max number of worker threads" do
- expect { subject }.to change { Gitlab::Database.config['pool'] }.from(1).to(18)
+ expect(old).not_to eq(18)
- expect(ActiveRecord::Base.connection_db_config.pool).to eq(18)
+ expect { subject }
+ .to change { ActiveRecord::Base.connection_db_config.pool }
+ .from(old)
+ .to(18)
end
- end
- context "and the existing pool size is larger than the max number of worker threads" do
- before do
- stub_database_config(pool_size: 100)
- end
+ it 'overwrites custom pool settings' do
+ config = Gitlab::Database.main.config.merge(pool: 42)
- it "sets it based on the max number of worker threads" do
- expect { subject }.to change { Gitlab::Database.config['pool'] }.from(100).to(18)
+ allow(Gitlab::Database.main).to receive(:config).and_return(config)
+ subject
expect(ActiveRecord::Base.connection_db_config.pool).to eq(18)
end
@@ -61,25 +55,16 @@ RSpec.describe 'Database config initializer' do
let(:headroom) { 15 }
before do
- stub_database_config(pool_size: 1)
stub_env("DB_POOL_HEADROOM", headroom)
end
it "adds headroom on top of the calculated size" do
- expect { subject }.to change { Gitlab::Database.config['pool'] }
- .from(1)
- .to(max_threads + headroom)
+ old = ActiveRecord::Base.connection_db_config.pool
- expect(ActiveRecord::Base.connection_db_config.pool).to eq(max_threads + headroom)
+ expect { subject }
+ .to change { ActiveRecord::Base.connection_db_config.pool }
+ .from(old)
+ .to(23)
end
end
-
- def stub_database_config(pool_size:)
- original_config = Gitlab::Database.config
-
- config = original_config.dup
- config['pool'] = pool_size
-
- allow(Gitlab::Database).to receive(:config).and_return(config)
- end
end
diff --git a/spec/initializers/lograge_spec.rb b/spec/initializers/lograge_spec.rb
index a1fd9be299b..4d2aa6e74de 100644
--- a/spec/initializers/lograge_spec.rb
+++ b/spec/initializers/lograge_spec.rb
@@ -120,7 +120,6 @@ RSpec.describe 'lograge', type: :request do
context 'with a log subscriber' do
include_context 'parsed logs'
- include_context 'clear DB Load Balancing configuration'
let(:subscriber) { Lograge::LogSubscribers::ActionController.new }
@@ -212,7 +211,7 @@ RSpec.describe 'lograge', type: :request do
end
before do
- ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
+ ApplicationRecord.connection.execute('SELECT pg_sleep(0.1);')
end
context 'when RequestStore is enabled', :request_store do
diff --git a/spec/initializers/rails_asset_host_spec.rb b/spec/initializers/rails_asset_host_spec.rb
new file mode 100644
index 00000000000..eb69c1fa85b
--- /dev/null
+++ b/spec/initializers/rails_asset_host_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Rails asset host initializer' do
+ def load_initializer
+ load Rails.root.join('config/initializers/rails_asset_host.rb')
+ end
+
+ around do |example|
+ old_asset_host = Rails.application.config.action_controller.asset_host
+
+ example.run
+
+ Rails.application.config.action_controller.asset_host = old_asset_host
+ ActionController::Base.asset_host = old_asset_host
+ end
+
+ subject { Rails.application.config.action_controller.asset_host }
+
+ it 'uses no asset host by default' do
+ load_initializer
+
+ expect(subject).to be nil
+ end
+
+ context 'with cdn_host defined in gitlab.yml' do
+ before do
+ stub_config_setting(cdn_host: 'https://gitlab.example.com')
+ end
+
+ it 'returns https://gitlab.example.com' do
+ load_initializer
+
+ expect(subject).to eq('https://gitlab.example.com')
+ end
+ end
+end
diff --git a/spec/javascripts/monitoring/components/dashboard_resize_browser_spec.js b/spec/javascripts/monitoring/components/dashboard_resize_browser_spec.js
deleted file mode 100644
index b85f50ec998..00000000000
--- a/spec/javascripts/monitoring/components/dashboard_resize_browser_spec.js
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * This file should only contain browser specific specs.
- * If you need to add or update a spec, please see spec/frontend/monitoring/components/*.js
- * https://gitlab.com/gitlab-org/gitlab/-/issues/194244#note_343427737
- * https://gitlab.com/groups/gitlab-org/-/epics/895#what-if-theres-a-karma-spec-which-is-simply-unmovable-to-jest-ie-it-is-dependent-on-a-running-browser-environment
- */
-
-import { createLocalVue } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
-import Vue from 'vue';
-import axios from '~/lib/utils/axios_utils';
-import Dashboard from '~/monitoring/components/dashboard.vue';
-import { createStore } from '~/monitoring/stores';
-import { metricsDashboardPayload, dashboardProps } from '../fixture_data';
-import { mockApiEndpoint } from '../mock_data';
-import { setupStoreWithData } from '../store_utils';
-
-const localVue = createLocalVue();
-
-describe('Dashboard', () => {
- let DashboardComponent;
- let mock;
- let store;
- let component;
- let wrapper;
-
- beforeEach(() => {
- setFixtures(`
- <div class="prometheus-graphs"></div>
- <div class="layout-page"></div>
- `);
-
- store = createStore();
- mock = new MockAdapter(axios);
- DashboardComponent = localVue.extend(Dashboard);
- });
-
- afterEach(() => {
- if (component) {
- component.$destroy();
- }
- if (wrapper) {
- wrapper.destroy();
- }
- mock.restore();
- });
-
- describe('responds to window resizes', () => {
- let promPanel;
- let promGroup;
- let panelToggle;
- let chart;
- beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsDashboardPayload);
-
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...dashboardProps,
- hasMetrics: true,
- showPanels: true,
- },
- store,
- provide: { hasManagedPrometheus: false },
- });
-
- setupStoreWithData(component.$store);
-
- return Vue.nextTick().then(() => {
- [promPanel] = component.$el.querySelectorAll('.prometheus-panel');
- promGroup = promPanel.querySelector('.prometheus-graph-group');
- panelToggle = promPanel.querySelector('.js-graph-group-toggle');
- chart = promGroup.querySelector('.position-relative svg');
- });
- });
-
- it('setting chart size to zero when panel group is hidden', () => {
- expect(promGroup.style.display).toBe('');
- expect(chart.clientWidth).toBeGreaterThan(0);
-
- panelToggle.click();
- return Vue.nextTick().then(() => {
- expect(promGroup.style.display).toBe('none');
- expect(chart.clientWidth).toBe(0);
- promPanel.style.width = '500px';
- });
- });
-
- it('expanding chart panel group after resize displays chart', () => {
- panelToggle.click();
-
- expect(chart.clientWidth).toBeGreaterThan(0);
- });
- });
-});
diff --git a/spec/javascripts/monitoring/fixture_data.js b/spec/javascripts/monitoring/fixture_data.js
deleted file mode 100644
index 1375c27cdde..00000000000
--- a/spec/javascripts/monitoring/fixture_data.js
+++ /dev/null
@@ -1 +0,0 @@
-export * from '../../frontend/monitoring/fixture_data';
diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js
deleted file mode 100644
index c80401e8c1d..00000000000
--- a/spec/javascripts/monitoring/mock_data.js
+++ /dev/null
@@ -1,5 +0,0 @@
-// No new code should be added to this file. Instead, modify the
-// file this one re-exports from. For more detail about why, see:
-// https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/31349
-
-export * from '../../frontend/monitoring/mock_data';
diff --git a/spec/javascripts/monitoring/store_utils.js b/spec/javascripts/monitoring/store_utils.js
deleted file mode 100644
index 1222716c829..00000000000
--- a/spec/javascripts/monitoring/store_utils.js
+++ /dev/null
@@ -1 +0,0 @@
-export * from '../../frontend/monitoring/store_utils';
diff --git a/spec/lib/api/helpers/runner_helpers_spec.rb b/spec/lib/api/ci/helpers/runner_helpers_spec.rb
index 65b35845aab..c6638bea59e 100644
--- a/spec/lib/api/helpers/runner_helpers_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_helpers_spec.rb
@@ -2,12 +2,12 @@
require 'spec_helper'
-RSpec.describe API::Helpers::Runner do
+RSpec.describe API::Ci::Helpers::Runner do
let(:ip_address) { '1.2.3.4' }
let(:runner_class) do
Class.new do
include API::Helpers
- include API::Helpers::Runner
+ include API::Ci::Helpers::Runner
attr_accessor :params
diff --git a/spec/lib/api/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb
index e55c20b7ab6..99f2db544a5 100644
--- a/spec/lib/api/helpers/runner_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe API::Helpers::Runner do
- let(:helper) { Class.new { include API::Helpers::Runner }.new }
+RSpec.describe API::Ci::Helpers::Runner do
+ let(:helper) { Class.new { include API::Ci::Helpers::Runner }.new }
before do
allow(helper).to receive(:env).and_return({})
diff --git a/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb b/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
index 99b52236771..ae0c0f53acd 100644
--- a/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
+++ b/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
let_it_be(:helper) { Class.new.include(described_class).new }
- describe 'redirect_registry_request' do
+ describe '#redirect_registry_request' do
using RSpec::Parameterized::TableSyntax
let(:options) { {} }
@@ -13,7 +13,7 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
subject { helper.redirect_registry_request(forward_to_registry, package_type, options) { helper.fallback } }
before do
- allow(helper).to receive(:options).and_return(for: API::NpmInstancePackages)
+ allow(helper).to receive(:options).and_return(for: described_class)
end
shared_examples 'executing fallback' do
@@ -34,38 +34,66 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
subject
- expect_snowplow_event(category: 'API::NpmInstancePackages', action: 'npm_request_forward')
+ expect_snowplow_event(category: described_class.to_s, action: "#{package_type}_request_forward")
end
end
- context 'with npm packages' do
- let(:package_type) { :npm }
+ %i[npm pypi].each do |forwardable_package_type|
+ context "with #{forwardable_package_type} packages" do
+ include_context 'dependency proxy helpers context'
- where(:application_setting, :forward_to_registry, :example_name) do
- true | true | 'executing redirect'
- true | false | 'executing fallback'
- false | true | 'executing fallback'
- false | false | 'executing fallback'
- end
+ let(:package_type) { forwardable_package_type }
- with_them do
- before do
- stub_application_setting(npm_package_requests_forwarding: application_setting)
+ where(:application_setting, :forward_to_registry, :example_name) do
+ true | true | 'executing redirect'
+ true | false | 'executing fallback'
+ false | true | 'executing fallback'
+ false | false | 'executing fallback'
end
- it_behaves_like params[:example_name]
+ with_them do
+ before do
+ allow_fetch_application_setting(attribute: "#{forwardable_package_type}_package_requests_forwarding", return_value: application_setting)
+ end
+
+ it_behaves_like params[:example_name]
+ end
end
end
- context 'with non-forwardable packages' do
+ context 'with non-forwardable package type' do
let(:forward_to_registry) { true }
before do
stub_application_setting(npm_package_requests_forwarding: true)
+ stub_application_setting(pypi_package_requests_forwarding: true)
end
- Packages::Package.package_types.keys.without('npm').each do |pkg_type|
+ Packages::Package.package_types.keys.without('npm', 'pypi').each do |pkg_type|
context "#{pkg_type}" do
+ let(:package_type) { pkg_type.to_sym }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, "Can't find application setting for package_type #{package_type}")
+ end
+ end
+ end
+ end
+
+ describe '#registry_url' do
+ subject { helper.registry_url(package_type, package_name: 'test') }
+
+ where(:package_type, :expected_result) do
+ :npm | 'https://registry.npmjs.org/test'
+ :pypi | 'https://pypi.org/simple/test/'
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_result) }
+ end
+
+ Packages::Package.package_types.keys.without('npm', 'pypi').each do |pkg_type|
+ context "with non-forwardable package type #{pkg_type}" do
let(:package_type) { pkg_type }
it 'raises an error' do
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 6e48ee4c315..587fe60860a 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -581,4 +581,40 @@ RSpec.describe API::Helpers do
end
end
end
+
+ describe '#order_by_similarity?' do
+ where(:params, :allow_unauthorized, :current_user_set, :expected) do
+ {} | false | false | false
+ {} | true | false | false
+ {} | false | true | false
+ {} | true | true | false
+ { order_by: 'similarity' } | false | false | false
+ { order_by: 'similarity' } | true | false | false
+ { order_by: 'similarity' } | true | true | false
+ { order_by: 'similarity' } | false | true | false
+ { search: 'test' } | false | false | false
+ { search: 'test' } | true | false | false
+ { search: 'test' } | true | true | false
+ { search: 'test' } | false | true | false
+ { order_by: 'similarity', search: 'test' } | false | false | false
+ { order_by: 'similarity', search: 'test' } | true | false | true
+ { order_by: 'similarity', search: 'test' } | true | true | true
+ { order_by: 'similarity', search: 'test' } | false | true | true
+ end
+
+ with_them do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ u = current_user_set ? user : nil
+ subject.instance_variable_set(:@current_user, u)
+
+ allow(subject).to receive(:params).and_return(params)
+ end
+
+ it 'returns the expected result' do
+ expect(subject.order_by_similarity?(allow_unauthorized: allow_unauthorized)).to eq(expected)
+ end
+ end
+ end
end
diff --git a/spec/lib/backup/database_backup_error_spec.rb b/spec/lib/backup/database_backup_error_spec.rb
new file mode 100644
index 00000000000..ef627900050
--- /dev/null
+++ b/spec/lib/backup/database_backup_error_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::DatabaseBackupError do
+ let(:config) do
+ {
+ host: 'localhost',
+ port: 5432,
+ database: 'gitlabhq_test'
+ }
+ end
+
+ let(:db_file_name) { File.join(Gitlab.config.backup.path, 'db', 'database.sql.gz') }
+
+ subject { described_class.new(config, db_file_name) }
+
+ it { is_expected.to respond_to :config }
+ it { is_expected.to respond_to :db_file_name }
+
+ it 'expects exception message to include database file' do
+ expect(subject.message).to include("#{db_file_name}")
+ end
+
+ it 'expects exception message to include database paths being backed up' do
+ expect(subject.message).to include("#{config[:host]}")
+ expect(subject.message).to include("#{config[:port]}")
+ expect(subject.message).to include("#{config[:database]}")
+ end
+end
diff --git a/spec/lib/backup/file_backup_error_spec.rb b/spec/lib/backup/file_backup_error_spec.rb
new file mode 100644
index 00000000000..bb174bbe4a0
--- /dev/null
+++ b/spec/lib/backup/file_backup_error_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::FileBackupError do
+ let_it_be(:lfs) { create(:lfs_object) }
+ let_it_be(:upload) { create(:upload) }
+
+ let(:backup_tarball) { '/tmp/backup/uploads' }
+
+ shared_examples 'includes backup path' do
+ it { is_expected.to respond_to :app_files_dir }
+ it { is_expected.to respond_to :backup_tarball }
+
+ it 'expects exception message to include file backup path location' do
+ expect(subject.message).to include("#{subject.backup_tarball}")
+ end
+
+ it 'expects exception message to include file being backed up' do
+ expect(subject.message).to include("#{subject.app_files_dir}")
+ end
+ end
+
+ context 'with lfs file' do
+ subject { described_class.new(lfs, backup_tarball) }
+
+ it_behaves_like 'includes backup path'
+ end
+
+ context 'with uploads file' do
+ subject { described_class.new(upload, backup_tarball) }
+
+ it_behaves_like 'includes backup path'
+ end
+end
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index cdb35c0ce01..a48a1752eff 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Backup::GitalyBackup do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.owner)
- expect(Process).to receive(:spawn).with(anything, 'create', '-path', anything, { in: anything, out: progress }).and_call_original
+ expect(Open3).to receive(:popen2).with(ENV, anything, 'create', '-path', anything).and_call_original
subject.start(:create)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
@@ -53,7 +53,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel) { 3 }
it 'passes parallel option through' do
- expect(Process).to receive(:spawn).with(anything, 'create', '-path', anything, '-parallel', '3', { in: anything, out: progress }).and_call_original
+ expect(Open3).to receive(:popen2).with(ENV, anything, 'create', '-path', anything, '-parallel', '3').and_call_original
subject.start(:create)
subject.wait
@@ -64,7 +64,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel_storage) { 3 }
it 'passes parallel option through' do
- expect(Process).to receive(:spawn).with(anything, 'create', '-path', anything, '-parallel-storage', '3', { in: anything, out: progress }).and_call_original
+ expect(Open3).to receive(:popen2).with(ENV, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original
subject.start(:create)
subject.wait
@@ -109,7 +109,7 @@ RSpec.describe Backup::GitalyBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
- expect(Process).to receive(:spawn).with(anything, 'restore', '-path', anything, { in: anything, out: progress }).and_call_original
+ expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything).and_call_original
subject.start(:restore)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
@@ -132,7 +132,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel) { 3 }
it 'does not pass parallel option through' do
- expect(Process).to receive(:spawn).with(anything, 'restore', '-path', anything, { in: anything, out: progress }).and_call_original
+ expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything).and_call_original
subject.start(:restore)
subject.wait
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index feaca6164eb..2cc1bf41d18 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -12,20 +12,13 @@ RSpec.describe Backup::Manager do
before do
allow(progress).to receive(:puts)
allow(progress).to receive(:print)
-
- @old_progress = $progress # rubocop:disable Style/GlobalVars
- $progress = progress # rubocop:disable Style/GlobalVars
- end
-
- after do
- $progress = @old_progress # rubocop:disable Style/GlobalVars
end
describe '#pack' do
- let(:backup_contents) { ['backup_contents'] }
+ let(:expected_backup_contents) { %w(repositories db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz backup_information.yml) }
+ let(:tar_file) { '1546300800_2019_01_01_12.3_gitlab_backup.tar' }
let(:tar_system_options) { { out: [tar_file, 'w', Gitlab.config.backup.archive_permissions] } }
- let(:tar_cmdline) { ['tar', '-cf', '-', *backup_contents, tar_system_options] }
-
+ let(:tar_cmdline) { ['tar', '-cf', '-', *expected_backup_contents, tar_system_options] }
let(:backup_information) do
{
backup_created_at: Time.zone.parse('2019-01-01'),
@@ -36,20 +29,20 @@ RSpec.describe Backup::Manager do
before do
allow(ActiveRecord::Base.connection).to receive(:reconnect!)
allow(Kernel).to receive(:system).and_return(true)
+ allow(YAML).to receive(:load_file).and_return(backup_information)
+
+ ::Backup::Manager::FOLDERS_TO_BACKUP.each do |folder|
+ allow(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, folder)).and_return(true)
+ end
- allow(subject).to receive(:backup_contents).and_return(backup_contents)
allow(subject).to receive(:backup_information).and_return(backup_information)
allow(subject).to receive(:upload)
end
- context 'when BACKUP is not set' do
- let(:tar_file) { '1546300800_2019_01_01_12.3_gitlab_backup.tar' }
-
- it 'uses the default tar file name' do
- subject.pack
+ it 'executes tar' do
+ subject.pack
- expect(Kernel).to have_received(:system).with(*tar_cmdline)
- end
+ expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
context 'when BACKUP is set' do
@@ -62,6 +55,58 @@ RSpec.describe Backup::Manager do
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
end
+
+ context 'when skipped is set in backup_information.yml' do
+ let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz backup_information.yml} }
+ let(:backup_information) do
+ {
+ backup_created_at: Time.zone.parse('2019-01-01'),
+ gitlab_version: '12.3',
+ skipped: ['repositories']
+ }
+ end
+
+ it 'executes tar' do
+ subject.pack
+
+ expect(Kernel).to have_received(:system).with(*tar_cmdline)
+ end
+ end
+
+ context 'when a directory does not exist' do
+ let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz backup_information.yml} }
+
+ before do
+ expect(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, 'repositories')).and_return(false)
+ end
+
+ it 'executes tar' do
+ subject.pack
+
+ expect(Kernel).to have_received(:system).with(*tar_cmdline)
+ end
+ end
+ end
+
+ describe '#remove_tmp' do
+ let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
+
+ before do
+ allow(FileUtils).to receive(:rm_rf).and_return(true)
+ end
+
+ it 'removes backups/tmp dir' do
+ subject.remove_tmp
+
+ expect(FileUtils).to have_received(:rm_rf).with(path)
+ end
+
+ it 'prints running task with a done confirmation' do
+ subject.remove_tmp
+
+ expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
+ expect(progress).to have_received(:puts).with('done')
+ end
end
describe '#remove_old' do
diff --git a/spec/lib/backup/repository_backup_error_spec.rb b/spec/lib/backup/repository_backup_error_spec.rb
new file mode 100644
index 00000000000..44c75c1cf77
--- /dev/null
+++ b/spec/lib/backup/repository_backup_error_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::RepositoryBackupError do
+ let_it_be(:snippet) { create(:snippet, content: 'foo', file_name: 'foo') }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:wiki) { ProjectWiki.new(project, nil) }
+
+ let(:backup_repos_path) { '/tmp/backup/repositories' }
+
+ shared_examples 'includes backup path' do
+ it { is_expected.to respond_to :container }
+ it { is_expected.to respond_to :backup_repos_path }
+
+ it 'expects exception message to include repo backup path location' do
+ expect(subject.message).to include("#{subject.backup_repos_path}")
+ end
+
+ it 'expects exception message to include container being backed up' do
+ expect(subject.message).to include("#{subject.container.disk_path}")
+ end
+ end
+
+ context 'with snippet repository' do
+ subject { described_class.new(snippet, backup_repos_path) }
+
+ it_behaves_like 'includes backup path'
+ end
+
+ context 'with project repository' do
+ subject { described_class.new(project, backup_repos_path) }
+
+ it_behaves_like 'includes backup path'
+ end
+
+ context 'with wiki repository' do
+ subject { described_class.new(wiki, backup_repos_path) }
+
+ it_behaves_like 'includes backup path'
+ end
+end
diff --git a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
index 7c6b0cac24b..cba41166be4 100644
--- a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
@@ -220,4 +220,33 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter do
expect(reference_filter(act, project: nil, group: group).to_html).to eq exp
end
end
+
+ context 'checking N+1' do
+ let(:namespace) { create(:namespace) }
+ let(:project2) { create(:project, :public, namespace: namespace) }
+ let(:alert2) { create(:alert_management_alert, project: project2) }
+ let(:alert_reference) { alert.to_reference }
+ let(:alert2_reference) { alert2.to_reference(full: true) }
+
+ it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
+ markdown = "#{alert_reference}"
+ max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ reference_filter(markdown)
+ end.count
+
+ expect(max_count).to eq 1
+
+ markdown = "#{alert_reference} ^alert#2 ^alert#3 ^alert#4 #{alert2_reference}"
+
+ # Since we're not batching alert queries across projects,
+ # we have to account for that.
+ # 1 for both projects, 1 for alerts in each project == 3
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
+ max_count += 2
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(max_count)
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
index bee8e42d12e..6bcea41a603 100644
--- a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
@@ -269,4 +269,34 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter do
expect(reference_filter(act, context).css('a').first.text).to eql("#{project.full_path}@#{commit.short_id}")
end
end
+
+ context 'checking N+1' do
+ let(:namespace2) { create(:namespace) }
+ let(:namespace3) { create(:namespace) }
+ let(:project2) { create(:project, :public, :repository, namespace: namespace2) }
+ let(:project3) { create(:project, :public, :repository, namespace: namespace3) }
+ let(:commit2) { project2.commit }
+ let(:commit3) { project3.commit }
+ let(:commit_reference) { commit.to_reference }
+ let(:commit2_reference) { commit2.to_reference(full: true) }
+ let(:commit3_reference) { commit3.to_reference(full: true) }
+
+ it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
+ markdown = "#{commit_reference}"
+ max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ reference_filter(markdown)
+ end.count
+
+ markdown = "#{commit_reference} 8b95f2f1 8b95f2f2 8b95f2f3 #{commit2_reference} #{commit3_reference}"
+
+ # Commits are not DB entries, they are on the project itself.
+ # So adding commits from two more projects to the markdown should
+ # only increase by 1 query
+ max_count += 1
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(max_count)
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
index f8a00716680..cdf6110dd6c 100644
--- a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
@@ -92,6 +92,11 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter do
expect(doc.to_html).to match(%r(\(<a.+>#{milestone.reference_link_text}</a>\.\)))
end
+ it 'links with adjacent html tags' do
+ doc = reference_filter("Milestone <p>#{reference}</p>.")
+ expect(doc.to_html).to match(%r(<p><a.+>#{milestone.reference_link_text}</a></p>))
+ end
+
it 'ignores invalid milestone names' do
exp = act = "Milestone #{Milestone.reference_prefix}#{milestone.name.reverse}"
diff --git a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
index 63a5a9184c1..d88e262883f 100644
--- a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
@@ -97,4 +97,34 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter do
expect(filter.send(:projects)).to eq([project.full_path])
end
end
+
+ context 'checking N+1' do
+ let_it_be(:normal_project) { create(:project, :public) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_project) { create(:project, group: group) }
+ let_it_be(:nested_group) { create(:group, :nested) }
+ let_it_be(:nested_project) { create(:project, group: nested_group) }
+ let_it_be(:normal_project_reference) { get_reference(normal_project) }
+ let_it_be(:group_project_reference) { get_reference(group_project) }
+ let_it_be(:nested_project_reference) { get_reference(nested_project) }
+
+ it 'does not have N+1 per multiple project references', :use_sql_query_cache do
+ markdown = "#{normal_project_reference}"
+
+ # warm up first
+ reference_filter(markdown)
+
+ max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ reference_filter(markdown)
+ end.count
+
+ expect(max_count).to eq 1
+
+ markdown = "#{normal_project_reference} #{invalidate_reference(normal_project_reference)} #{group_project_reference} #{nested_project_reference}"
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(max_count)
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb b/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb
index 56f36af5066..082e5c92e53 100644
--- a/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb
+++ b/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb
@@ -6,18 +6,42 @@ RSpec.describe Banzai::Filter::TableOfContentsTagFilter do
include FilterSpecHelper
context 'table of contents' do
- let(:html) { '<p>[[<em>TOC</em>]]</p>' }
+ shared_examples 'table of contents tag' do
+ it 'replaces toc tag with ToC result' do
+ doc = filter(html, {}, { toc: "FOO" })
- it 'replaces [[<em>TOC</em>]] with ToC result' do
- doc = filter(html, {}, { toc: "FOO" })
+ expect(doc.to_html).to eq("FOO")
+ end
- expect(doc.to_html).to eq("FOO")
+ it 'handles an empty ToC result' do
+ doc = filter(html)
+
+ expect(doc.to_html).to eq ''
+ end
+ end
+
+ context '[[_TOC_]] as tag' do
+ it_behaves_like 'table of contents tag' do
+ let(:html) { '<p>[[<em>TOC</em>]]</p>' }
+ end
end
- it 'handles an empty ToC result' do
- doc = filter(html)
+ context '[[_toc_]] as tag' do
+ it_behaves_like 'table of contents tag' do
+ let(:html) { '<p>[[<em>toc</em>]]</p>' }
+ end
+ end
+
+ context '[TOC] as tag' do
+ it_behaves_like 'table of contents tag' do
+ let(:html) { '<p>[TOC]</p>' }
+ end
+ end
- expect(doc.to_html).to eq ''
+ context '[toc] as tag' do
+ it_behaves_like 'table of contents tag' do
+ let(:html) { '<p>[toc]</p>' }
+ end
end
end
end
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index 989e06a992d..72661003361 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -102,33 +102,45 @@ RSpec.describe Banzai::Pipeline::FullPipeline do
describe 'table of contents' do
let(:project) { create(:project, :public) }
- let(:markdown) do
- <<-MARKDOWN.strip_heredoc
- [[_TOC_]]
+
+ shared_examples 'table of contents tag' do |tag, tag_html|
+ let(:markdown) do
+ <<-MARKDOWN.strip_heredoc
+ #{tag}
# Header
- MARKDOWN
- end
+ MARKDOWN
+ end
- let(:invalid_markdown) do
- <<-MARKDOWN.strip_heredoc
- test [[_TOC_]]
+ let(:invalid_markdown) do
+ <<-MARKDOWN.strip_heredoc
+ test #{tag}
# Header
- MARKDOWN
- end
+ MARKDOWN
+ end
- it 'inserts a table of contents' do
- output = described_class.to_html(markdown, project: project)
+ it 'inserts a table of contents' do
+ output = described_class.to_html(markdown, project: project)
- expect(output).to include("<ul class=\"section-nav\">")
- expect(output).to include("<li><a href=\"#header\">Header</a></li>")
+ expect(output).to include("<ul class=\"section-nav\">")
+ expect(output).to include("<li><a href=\"#header\">Header</a></li>")
+ end
+
+ it 'does not insert a table of contents' do
+ output = described_class.to_html(invalid_markdown, project: project)
+
+ expect(output).to include("test #{tag_html}")
+ end
end
- it 'does not insert a table of contents' do
- output = described_class.to_html(invalid_markdown, project: project)
+ context 'with [[_TOC_]] as tag' do
+ it_behaves_like 'table of contents tag', '[[_TOC_]]', '[[<em>TOC</em>]]'
+ end
- expect(output).to include("test [[<em>TOC</em>]]")
+ context 'with [toc] as tag' do
+ it_behaves_like 'table of contents tag', '[toc]', '[toc]'
+ it_behaves_like 'table of contents tag', '[TOC]', '[TOC]'
end
end
diff --git a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
index 007d310247b..59f5e4a6900 100644
--- a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Banzai::Pipeline::WikiPipeline do
end
end
- it 'is case-sensitive' do
+ it 'is not case-sensitive' do
markdown = <<-MD.strip_heredoc
[[_toc_]]
@@ -36,9 +36,22 @@ RSpec.describe Banzai::Pipeline::WikiPipeline do
Foo
MD
- output = described_class.to_html(markdown, project: project, wiki: wiki)
+ result = described_class.call(markdown, project: project, wiki: wiki)
+
+ expect(result[:output].to_html).to include(result[:toc])
+ end
+
+ it 'works with alternative [toc] tag' do
+ markdown = <<-MD.strip_heredoc
+ [toc]
- expect(output).to include('[[<em>toc</em>]]')
+ # Header 1
+
+ Foo
+ MD
+
+ result = described_class.call(markdown, project: project, wiki: wiki)
+ expect(result[:output].to_html).to include(result[:toc])
end
it 'handles an empty pipeline result' do
diff --git a/spec/lib/error_tracking/collector/sentry_auth_parser_spec.rb b/spec/lib/error_tracking/collector/sentry_auth_parser_spec.rb
new file mode 100644
index 00000000000..4f00b1ec654
--- /dev/null
+++ b/spec/lib/error_tracking/collector/sentry_auth_parser_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ErrorTracking::Collector::SentryAuthParser do
+ describe '.parse' do
+ let(:headers) { { 'X-Sentry-Auth' => "Sentry sentry_key=glet_1fedb514e17f4b958435093deb02048c" } }
+ let(:request) { double('request', headers: headers) }
+
+ subject { described_class.parse(request) }
+
+ context 'empty headers' do
+ let(:headers) { {} }
+
+ it 'fails with exception' do
+ expect { subject }.to raise_error(StandardError)
+ end
+ end
+
+ context 'missing sentry_key' do
+ let(:headers) { { 'X-Sentry-Auth' => "Sentry foo=bar" } }
+
+ it 'returns empty value for public_key' do
+ expect(subject[:public_key]).to be_nil
+ end
+ end
+
+ it 'returns correct value for public_key' do
+ expect(subject[:public_key]).to eq('glet_1fedb514e17f4b958435093deb02048c')
+ end
+ end
+end
diff --git a/spec/lib/extracts_path_spec.rb b/spec/lib/extracts_path_spec.rb
index 05f3bb2f71a..9b2bb024fa6 100644
--- a/spec/lib/extracts_path_spec.rb
+++ b/spec/lib/extracts_path_spec.rb
@@ -213,20 +213,4 @@ RSpec.describe ExtractsPath do
expect(extract_ref_without_atom('foo.atom')).to eq(nil)
end
end
-
- describe '#lfs_blob_ids' do
- let(:tag) { @project.repository.add_tag(@project.owner, 'my-annotated-tag', 'master', 'test tag') }
- let(:ref) { tag.target }
- let(:params) { { ref: ref, path: 'README.md' } }
-
- before do
- @project = create(:project, :repository)
- end
-
- it 'handles annotated tags' do
- assign_ref_vars
-
- expect(lfs_blob_ids).to eq([])
- end
- end
end
diff --git a/spec/lib/feature/gitaly_spec.rb b/spec/lib/feature/gitaly_spec.rb
index 696427bb8b6..311589c3253 100644
--- a/spec/lib/feature/gitaly_spec.rb
+++ b/spec/lib/feature/gitaly_spec.rb
@@ -78,7 +78,7 @@ RSpec.describe Feature::Gitaly do
context 'when table does not exist' do
before do
- allow(::Gitlab::Database).to receive(:cached_table_exists?).and_return(false)
+ allow(::Gitlab::Database.main).to receive(:cached_table_exists?).and_return(false)
end
it 'returns an empty Hash' do
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index dc8fd0de313..9d4820f9a4c 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -310,7 +310,7 @@ RSpec.describe Feature, stub_feature_flags: false do
context 'when database exists' do
before do
- allow(Gitlab::Database).to receive(:exists?).and_return(true)
+ allow(Gitlab::Database.main).to receive(:exists?).and_return(true)
end
it 'checks the persisted status and returns false' do
@@ -322,7 +322,7 @@ RSpec.describe Feature, stub_feature_flags: false do
context 'when database does not exist' do
before do
- allow(Gitlab::Database).to receive(:exists?).and_return(false)
+ allow(Gitlab::Database.main).to receive(:exists?).and_return(false)
end
it 'returns false without checking the status in the database' do
diff --git a/spec/lib/generators/gitlab/usage_metric_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_generator_spec.rb
index f38815acca6..207ecb88aad 100644
--- a/spec/lib/generators/gitlab/usage_metric_generator_spec.rb
+++ b/spec/lib/generators/gitlab/usage_metric_generator_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::UsageMetricGenerator, :silence_stdout do
let(:spec_ce_temp_dir) { Dir.mktmpdir }
let(:spec_ee_temp_dir) { Dir.mktmpdir }
let(:args) { ['CountFoo'] }
- let(:options) { { 'type' => 'redis_hll' } }
+ let(:options) { { 'type' => 'generic' } }
before do
stub_const("#{described_class}::CE_DIR", ce_temp_dir)
@@ -30,27 +30,39 @@ RSpec.describe Gitlab::UsageMetricGenerator, :silence_stdout do
describe 'Creating metric instrumentation files' do
let(:sample_metric_dir) { 'lib/generators/gitlab/usage_metric_generator' }
- let(:sample_metric) { fixture_file(File.join(sample_metric_dir, 'sample_metric.rb')) }
+ let(:generic_sample_metric) { fixture_file(File.join(sample_metric_dir, 'sample_generic_metric.rb')) }
+ let(:database_sample_metric) { fixture_file(File.join(sample_metric_dir, 'sample_database_metric.rb')) }
let(:sample_spec) { fixture_file(File.join(sample_metric_dir, 'sample_metric_test.rb')) }
it 'creates CE metric instrumentation files using the template' do
described_class.new(args, options).invoke_all
- expect_generated_file(ce_temp_dir, 'count_foo_metric.rb', sample_metric)
+ expect_generated_file(ce_temp_dir, 'count_foo_metric.rb', generic_sample_metric)
expect_generated_file(spec_ce_temp_dir, 'count_foo_metric_spec.rb', sample_spec)
end
context 'with EE flag true' do
- let(:options) { { 'type' => 'redis_hll', 'ee' => true } }
+ let(:options) { { 'type' => 'generic', 'ee' => true } }
it 'creates EE metric instrumentation files using the template' do
described_class.new(args, options).invoke_all
- expect_generated_file(ee_temp_dir, 'count_foo_metric.rb', sample_metric)
+ expect_generated_file(ee_temp_dir, 'count_foo_metric.rb', generic_sample_metric)
expect_generated_file(spec_ee_temp_dir, 'count_foo_metric_spec.rb', sample_spec)
end
end
+ context 'for database type' do
+ let(:options) { { 'type' => 'database', 'operation' => 'count' } }
+
+ it 'creates the metric instrumentation file using the template' do
+ described_class.new(args, options).invoke_all
+
+ expect_generated_file(ce_temp_dir, 'count_foo_metric.rb', database_sample_metric)
+ expect_generated_file(spec_ce_temp_dir, 'count_foo_metric_spec.rb', sample_spec)
+ end
+ end
+
context 'with type option missing' do
let(:options) { {} }
@@ -66,5 +78,21 @@ RSpec.describe Gitlab::UsageMetricGenerator, :silence_stdout do
expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Unknown type 'some_other_type'/)
end
end
+
+ context 'without operation for database metric' do
+ let(:options) { { 'type' => 'database' } }
+
+ it 'raises an ArgumentError' do
+ expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Unknown operation ''/)
+ end
+ end
+
+ context 'with wrong operation for database metric' do
+ let(:options) { { 'type' => 'database', 'operation' => 'sleep' } }
+
+ it 'raises an ArgumentError' do
+ expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Unknown operation 'sleep'/)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
index ebc5ae2a632..4fe55ba0c0c 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
@@ -79,56 +79,6 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
include_context 'when records are loaded by maintainer'
end
-
- describe 'special case' do
- let(:mr1) { create(:merge_request, source_project: project, allow_broken: true, created_at: 20.days.ago) }
- let(:mr2) { create(:merge_request, source_project: project, allow_broken: true, created_at: 19.days.ago) }
- let(:ci_build1) { create(:ci_build) }
- let(:ci_build2) { create(:ci_build) }
- let(:default_stages) { Gitlab::Analytics::CycleAnalytics::DefaultStages }
- let(:stage) { build(:cycle_analytics_project_stage, default_stages.params_for_test_stage.merge(project: project)) }
-
- before do
- mr1.metrics.update!({
- merged_at: 5.days.ago,
- first_deployed_to_production_at: 1.day.ago,
- latest_build_started_at: 5.days.ago,
- latest_build_finished_at: 1.day.ago,
- pipeline: ci_build1.pipeline
- })
- mr2.metrics.update!({
- merged_at: 10.days.ago,
- first_deployed_to_production_at: 5.days.ago,
- latest_build_started_at: 9.days.ago,
- latest_build_finished_at: 7.days.ago,
- pipeline: ci_build2.pipeline
- })
-
- project.add_user(user, Gitlab::Access::MAINTAINER)
- end
-
- context 'returns build records' do
- shared_examples 'orders build records by `latest_build_finished_at`' do
- it 'orders by `latest_build_finished_at`' do
- build_ids = subject.map { |item| item[:id] }
-
- expect(build_ids).to eq([ci_build1.id, ci_build2.id])
- end
- end
-
- context 'when requesting records for default test stage' do
- include_examples 'orders build records by `latest_build_finished_at`'
- end
-
- context 'when requesting records for default staging stage' do
- before do
- stage.assign_attributes(default_stages.params_for_staging_stage)
- end
-
- include_examples 'orders build records by `latest_build_finished_at`'
- end
- end
- end
end
describe 'pagination' do
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index 2d4239eb761..b0522e269e0 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -496,18 +496,6 @@ RSpec.describe Gitlab::Auth::AuthFinders do
expect(find_user_from_web_access_token(:archive)).to eq(user)
end
- context 'when allow_archive_as_web_access_format feature flag is disabled' do
- before do
- stub_feature_flags(allow_archive_as_web_access_format: false)
- end
-
- it 'returns nil for ARCHIVE requests' do
- set_header('SCRIPT_NAME', '/-/archive/main.zip')
-
- expect(find_user_from_web_access_token(:archive)).to be_nil
- end
- end
-
context 'for API requests' do
it 'returns the user' do
set_header('SCRIPT_NAME', '/api/endpoint')
diff --git a/spec/lib/gitlab/auth/result_spec.rb b/spec/lib/gitlab/auth/result_spec.rb
new file mode 100644
index 00000000000..f8de4b80db2
--- /dev/null
+++ b/spec/lib/gitlab/auth/result_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Auth::Result do
+ let_it_be(:actor) { create(:user) }
+
+ subject { described_class.new(actor, nil, nil, []) }
+
+ context 'when actor is User' do
+ let_it_be(:actor) { create(:user) }
+
+ it 'returns auth_user' do
+ expect(subject.auth_user).to eq(actor)
+ end
+
+ it 'does not return deploy token' do
+ expect(subject.deploy_token).to be_nil
+ end
+ end
+
+ context 'when actor is Deploy token' do
+ let_it_be(:actor) { create(:deploy_token) }
+
+ it 'returns deploy token' do
+ expect(subject.deploy_token).to eq(actor)
+ end
+
+ it 'does not return auth_user' do
+ expect(subject.auth_user).to be_nil
+ end
+ end
+
+ describe '#authentication_abilities_include?' do
+ context 'when authentication abilities are empty' do
+ it 'returns false' do
+ expect(subject.authentication_abilities_include?(:read_code)).to be_falsey
+ end
+ end
+
+ context 'when authentication abilities are not empty' do
+ subject { described_class.new(actor, nil, nil, [:push_code]) }
+
+ it 'returns false when ability is not allowed' do
+ expect(subject.authentication_abilities_include?(:read_code)).to be_falsey
+ end
+
+ it 'returns true when ability is allowed' do
+ expect(subject.authentication_abilities_include?(:push_code)).to be_truthy
+ end
+ end
+ end
+
+ describe '#can_perform_action_on_project?' do
+ let(:project) { double }
+
+ it 'returns if actor can do perform given action on given project' do
+ expect(Ability).to receive(:allowed?).with(actor, :push_code, project).and_return(true)
+ expect(subject.can_perform_action_on_project?(:push_code, project)).to be_truthy
+ end
+
+ it 'returns if actor cannot do perform given action on given project' do
+ expect(Ability).to receive(:allowed?).with(actor, :push_code, project).and_return(false)
+ expect(subject.can_perform_action_on_project?(:push_code, project)).to be_falsey
+ end
+ end
+
+ describe '#can?' do
+ it 'returns if actor can do perform given action on given project' do
+ expect(actor).to receive(:can?).with(:push_code).and_return(true)
+ expect(subject.can?(:push_code)).to be_truthy
+ end
+
+ it 'returns if actor cannot do perform given action on given project' do
+ expect(actor).to receive(:can?).with(:push_code).and_return(false)
+ expect(subject.can?(:push_code)).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 1d708b17076..cc592bb8f24 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let_it_be(:project) { create(:project) }
+ let(:auth_failure) { { actor: nil, project: nil, type: nil, authentication_abilities: nil } }
let(:gl_auth) { described_class }
describe 'constants' do
@@ -159,26 +160,26 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let(:project) { build.project }
it 'recognises user-less build' do
- expect(subject).to eq(Gitlab::Auth::Result.new(nil, build.project, :ci, described_class.build_authentication_abilities))
+ expect(subject).to have_attributes(actor: nil, project: build.project, type: :ci, authentication_abilities: described_class.build_authentication_abilities)
end
it 'recognises user token' do
build.update(user: create(:user))
- expect(subject).to eq(Gitlab::Auth::Result.new(build.user, build.project, :build, described_class.build_authentication_abilities))
+ expect(subject).to have_attributes(actor: build.user, project: build.project, type: :build, authentication_abilities: described_class.build_authentication_abilities)
end
it 'fails with blocked user token' do
build.update(user: create(:user, :blocked))
- expect(subject).to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
+ expect(subject).to have_attributes(auth_failure)
end
context 'username is not gitlab-ci-token' do
let(:username) { 'another_username' }
it 'fails to authenticate' do
- expect(subject).to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
+ expect(subject).to have_attributes(auth_failure)
end
end
end
@@ -189,7 +190,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let(:project) { build.project }
it 'denies authentication' do
- expect(subject).to eq(Gitlab::Auth::Result.new)
+ expect(subject).to have_attributes(auth_failure)
end
end
end
@@ -199,20 +200,20 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
project.create_drone_ci_integration(active: true)
project.drone_ci_integration.update(token: 'token')
- expect(gl_auth.find_for_git_client('drone-ci-token', 'token', project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(nil, project, :ci, described_class.build_authentication_abilities))
+ expect(gl_auth.find_for_git_client('drone-ci-token', 'token', project: project, ip: 'ip')).to have_attributes(actor: nil, project: project, type: :ci, authentication_abilities: described_class.build_authentication_abilities)
end
it 'recognizes master passwords' do
user = create(:user, password: 'password')
- expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, described_class.full_authentication_abilities))
+ expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
include_examples 'user login operation with unique ip limit' do
let(:user) { create(:user, password: 'password') }
def operation
- expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, described_class.full_authentication_abilities))
+ expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
end
@@ -221,14 +222,14 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
user = create(:user)
token = Gitlab::LfsToken.new(user).token
- expect(gl_auth.find_for_git_client(user.username, token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(user, nil, :lfs_token, described_class.read_write_project_authentication_abilities))
+ expect(gl_auth.find_for_git_client(user.username, token, project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :lfs_token, authentication_abilities: described_class.read_write_project_authentication_abilities)
end
it 'recognizes deploy key lfs tokens' do
key = create(:deploy_key)
token = Gitlab::LfsToken.new(key).token
- expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(key, nil, :lfs_deploy_token, described_class.read_only_authentication_abilities))
+ expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: nil, ip: 'ip')).to have_attributes(actor: key, project: nil, type: :lfs_deploy_token, authentication_abilities: described_class.read_only_authentication_abilities)
end
it 'does not try password auth before oauth' do
@@ -245,14 +246,14 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
create(:deploy_keys_project, :write_access, deploy_key: key, project: project)
token = Gitlab::LfsToken.new(key).token
- expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(key, nil, :lfs_deploy_token, described_class.read_write_authentication_abilities))
+ expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to have_attributes(actor: key, project: nil, type: :lfs_deploy_token, authentication_abilities: described_class.read_write_authentication_abilities)
end
it 'does not grant deploy key write permissions' do
key = create(:deploy_key)
token = Gitlab::LfsToken.new(key).token
- expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(key, nil, :lfs_deploy_token, described_class.read_only_authentication_abilities))
+ expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to have_attributes(actor: key, project: nil, type: :lfs_deploy_token, authentication_abilities: described_class.read_only_authentication_abilities)
end
end
@@ -264,18 +265,18 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
shared_examples 'an oauth failure' do
it 'fails' do
expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
+ .to have_attributes(auth_failure)
end
end
it 'succeeds for OAuth tokens with the `api` scope' do
- expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(user, nil, :oauth, described_class.full_authentication_abilities))
+ expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :oauth, authentication_abilities: described_class.full_authentication_abilities)
end
it 'fails for OAuth tokens with other scopes' do
token = Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'read_user')
- expect(gl_auth.find_for_git_client("oauth2", token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(nil, nil))
+ expect(gl_auth.find_for_git_client("oauth2", token.token, project: nil, ip: 'ip')).to have_attributes(auth_failure)
end
it 'does not try password auth before oauth' do
@@ -342,7 +343,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
impersonation_token = create(:personal_access_token, :impersonation, scopes: ['api'])
expect(gl_auth.find_for_git_client('', impersonation_token.token, project: nil, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
+ .to have_attributes(auth_failure)
end
it 'limits abilities based on scope' do
@@ -365,36 +366,27 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'fails if user is blocked' do
expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
+ .to have_attributes(auth_failure)
end
end
- context 'when using a project access token' do
- let_it_be(:project_bot_user) { create(:user, :project_bot) }
- let_it_be(:project_access_token) { create(:personal_access_token, user: project_bot_user) }
-
- context 'with valid project access token' do
- before do
- project.add_maintainer(project_bot_user)
- end
-
+ context 'when using a resource access token' do
+ shared_examples 'with a valid access token' do
it 'successfully authenticates the project bot' do
- expect(gl_auth.find_for_git_client(project_bot_user.username, project_access_token.token, project: project, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(project_bot_user, nil, :personal_access_token, described_class.full_authentication_abilities))
+ expect(gl_auth.find_for_git_client(project_bot_user.username, access_token.token, project: project, ip: 'ip'))
+ .to have_attributes(actor: project_bot_user, project: nil, type: :personal_access_token, authentication_abilities: described_class.full_authentication_abilities)
end
it 'successfully authenticates the project bot with a nil project' do
- expect(gl_auth.find_for_git_client(project_bot_user.username, project_access_token.token, project: nil, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(project_bot_user, nil, :personal_access_token, described_class.full_authentication_abilities))
+ expect(gl_auth.find_for_git_client(project_bot_user.username, access_token.token, project: nil, ip: 'ip'))
+ .to have_attributes(actor: project_bot_user, project: nil, type: :personal_access_token, authentication_abilities: described_class.full_authentication_abilities)
end
end
- context 'with invalid project access token' do
- context 'when project bot is not a project member' do
- it 'fails for a non-project member' do
- expect(gl_auth.find_for_git_client(project_bot_user.username, project_access_token.token, project: project, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
- end
+ shared_examples 'with an invalid access token' do
+ it 'fails for a non-member' do
+ expect(gl_auth.find_for_git_client(project_bot_user.username, access_token.token, project: project, ip: 'ip'))
+ .to have_attributes(auth_failure)
end
context 'when project bot user is blocked' do
@@ -403,9 +395,59 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it 'fails for a blocked project bot' do
- expect(gl_auth.find_for_git_client(project_bot_user.username, project_access_token.token, project: project, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
+ expect(gl_auth.find_for_git_client(project_bot_user.username, access_token.token, project: project, ip: 'ip'))
+ .to have_attributes(auth_failure)
+ end
+ end
+ end
+
+ context 'when using a personal namespace project access token' do
+ let_it_be(:project_bot_user) { create(:user, :project_bot) }
+ let_it_be(:access_token) { create(:personal_access_token, user: project_bot_user) }
+
+ context 'when the token belongs to the project' do
+ before do
+ project.add_maintainer(project_bot_user)
+ end
+
+ it_behaves_like 'with a valid access token'
+ end
+
+ it_behaves_like 'with an invalid access token'
+ end
+
+ context 'when in a group namespace' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ context 'when using a project access token' do
+ let_it_be(:project_bot_user) { create(:user, :project_bot) }
+ let_it_be(:access_token) { create(:personal_access_token, user: project_bot_user) }
+
+ context 'when token user belongs to the project' do
+ before do
+ project.add_maintainer(project_bot_user)
+ end
+
+ it_behaves_like 'with a valid access token'
+ end
+
+ it_behaves_like 'with an invalid access token'
+ end
+
+ context 'when using a group access token' do
+ let_it_be(:project_bot_user) { create(:user, name: 'Group token bot', email: "group_#{group.id}_bot@example.com", username: "group_#{group.id}_bot", user_type: :project_bot) }
+ let_it_be(:access_token) { create(:personal_access_token, user: project_bot_user) }
+
+ context 'when the token belongs to the group' do
+ before do
+ group.add_maintainer(project_bot_user)
+ end
+
+ it_behaves_like 'with a valid access token'
end
+
+ it_behaves_like 'with an invalid access token'
end
end
end
@@ -421,7 +463,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
+ .to have_attributes(auth_failure)
end
it 'goes through lfs authentication' do
@@ -432,7 +474,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, described_class.full_authentication_abilities))
+ .to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
it 'goes through oauth authentication when the username is oauth2' do
@@ -443,14 +485,14 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, described_class.full_authentication_abilities))
+ .to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
end
it 'returns double nil for invalid credentials' do
login = 'foo'
- expect(gl_auth.find_for_git_client(login, 'bar', project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new)
+ expect(gl_auth.find_for_git_client(login, 'bar', project: nil, ip: 'ip')).to have_attributes(auth_failure)
end
it 'throws an error suggesting user create a PAT when internal auth is disabled' do
@@ -460,27 +502,25 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
context 'while using deploy tokens' do
- let(:auth_failure) { Gitlab::Auth::Result.new(nil, nil) }
-
shared_examples 'registry token scope' do
it 'fails when login is not valid' do
expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
it 'fails when token is not valid' do
expect(gl_auth.find_for_git_client(login, '123123', project: project, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
it 'fails if token is nil' do
expect(gl_auth.find_for_git_client(login, nil, project: nil, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
it 'fails if token is not related to project' do
expect(gl_auth.find_for_git_client(login, 'abcdef', project: nil, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
it 'fails if token has been revoked' do
@@ -488,7 +528,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
expect(deploy_token.revoked?).to be_truthy
expect(gl_auth.find_for_git_client('deploy-token', deploy_token.token, project: nil, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
end
@@ -500,7 +540,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'fails when login and token are valid' do
expect(gl_auth.find_for_git_client(login, deploy_token.token, project: nil, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
end
@@ -509,7 +549,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'fails when login and token are valid' do
expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
end
end
@@ -520,17 +560,17 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let(:deploy_token) { create(:deploy_token, username: username, read_registry: false, projects: [project]) }
it 'succeeds for the token' do
- auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:download_code])
+ auth_success = { actor: deploy_token, project: project, type: :deploy_token, authentication_abilities: [:download_code] }
expect(gl_auth.find_for_git_client(username, deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_success)
+ .to have_attributes(auth_success)
end
it 'succeeds for the user' do
- auth_success = Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, described_class.full_authentication_abilities)
+ auth_success = { actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities }
expect(gl_auth.find_for_git_client(username, 'my-secret', project: project, ip: 'ip'))
- .to eq(auth_success)
+ .to have_attributes(auth_success)
end
end
@@ -538,16 +578,16 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'and belong to the same project' do
let!(:read_registry) { create(:deploy_token, username: 'deployer', read_repository: false, projects: [project]) }
let!(:read_repository) { create(:deploy_token, username: read_registry.username, read_registry: false, projects: [project]) }
- let(:auth_success) { Gitlab::Auth::Result.new(read_repository, project, :deploy_token, [:download_code]) }
+ let(:auth_success) { { actor: read_repository, project: project, type: :deploy_token, authentication_abilities: [:download_code] } }
it 'succeeds for the right token' do
expect(gl_auth.find_for_git_client('deployer', read_repository.token, project: project, ip: 'ip'))
- .to eq(auth_success)
+ .to have_attributes(auth_success)
end
it 'fails for the wrong token' do
expect(gl_auth.find_for_git_client('deployer', read_registry.token, project: project, ip: 'ip'))
- .not_to eq(auth_success)
+ .not_to have_attributes(auth_success)
end
end
@@ -556,16 +596,16 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let!(:read_registry) { create(:deploy_token, username: 'deployer', read_repository: false, projects: [project]) }
let!(:read_repository) { create(:deploy_token, username: read_registry.username, read_registry: false, projects: [other_project]) }
- let(:auth_success) { Gitlab::Auth::Result.new(read_repository, other_project, :deploy_token, [:download_code]) }
+ let(:auth_success) { { actor: read_repository, project: other_project, type: :deploy_token, authentication_abilities: [:download_code] } }
it 'succeeds for the right token' do
expect(gl_auth.find_for_git_client('deployer', read_repository.token, project: other_project, ip: 'ip'))
- .to eq(auth_success)
+ .to have_attributes(auth_success)
end
it 'fails for the wrong token' do
expect(gl_auth.find_for_git_client('deployer', read_registry.token, project: other_project, ip: 'ip'))
- .not_to eq(auth_success)
+ .not_to have_attributes(auth_success)
end
end
end
@@ -575,18 +615,18 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let(:login) { deploy_token.username }
it 'succeeds when login and token are valid' do
- auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:download_code])
+ auth_success = { actor: deploy_token, project: project, type: :deploy_token, authentication_abilities: [:download_code] }
expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_success)
+ .to have_attributes(auth_success)
end
it 'succeeds when custom login and token are valid' do
deploy_token = create(:deploy_token, username: 'deployer', read_registry: false, projects: [project])
- auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:download_code])
+ auth_success = { actor: deploy_token, project: project, type: :deploy_token, authentication_abilities: [:download_code] }
expect(gl_auth.find_for_git_client('deployer', deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_success)
+ .to have_attributes(auth_success)
end
it 'does not attempt to rate limit unique IPs for a deploy token' do
@@ -597,23 +637,23 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'fails when login is not valid' do
expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
it 'fails when token is not valid' do
expect(gl_auth.find_for_git_client(login, '123123', project: project, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
it 'fails if token is nil' do
expect(gl_auth.find_for_git_client(login, nil, project: project, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
it 'fails if token is not related to project' do
another_deploy_token = create(:deploy_token)
expect(gl_auth.find_for_git_client(another_deploy_token.username, another_deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
it 'fails if token has been revoked' do
@@ -621,7 +661,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
expect(deploy_token.revoked?).to be_truthy
expect(gl_auth.find_for_git_client('deploy-token', deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
end
@@ -633,16 +673,16 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
subject { gl_auth.find_for_git_client(login, deploy_token.token, project: project_with_group, ip: 'ip') }
it 'succeeds when login and a group deploy token are valid' do
- auth_success = Gitlab::Auth::Result.new(deploy_token, project_with_group, :deploy_token, [:download_code, :read_container_image])
+ auth_success = { actor: deploy_token, project: project_with_group, type: :deploy_token, authentication_abilities: [:download_code, :read_container_image] }
- expect(subject).to eq(auth_success)
+ expect(subject).to have_attributes(auth_success)
end
it 'fails if token is not related to group' do
another_deploy_token = create(:deploy_token, :group, read_repository: true)
expect(gl_auth.find_for_git_client(another_deploy_token.username, another_deploy_token.token, project: project_with_group, ip: 'ip'))
- .to eq(auth_failure)
+ .to have_attributes(auth_failure)
end
end
@@ -656,10 +696,10 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it 'succeeds when login and a project token are valid' do
- auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:read_container_image])
+ auth_success = { actor: deploy_token, project: project, type: :deploy_token, authentication_abilities: [:read_container_image] }
expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_success)
+ .to have_attributes(auth_success)
end
it_behaves_like 'registry token scope'
@@ -678,10 +718,10 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it 'succeeds when login and a project token are valid' do
- auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:create_container_image])
+ auth_success = { actor: deploy_token, project: project, type: :deploy_token, authentication_abilities: [:create_container_image] }
expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_success)
+ .to have_attributes(auth_success)
end
it_behaves_like 'registry token scope'
@@ -891,6 +931,6 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
def expect_results_with_abilities(personal_access_token, abilities, success = true)
expect(gl_auth.find_for_git_client('', personal_access_token&.token, project: nil, ip: 'ip'))
- .to eq(Gitlab::Auth::Result.new(personal_access_token&.user, nil, personal_access_token.nil? ? nil : :personal_access_token, abilities))
+ .to have_attributes(actor: personal_access_token&.user, project: nil, type: personal_access_token.nil? ? nil : :personal_access_token, authentication_abilities: abilities)
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
index f56cf899410..a7895623d6f 100644
--- a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
@@ -23,6 +23,8 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests d
end
context "for MRs with #draft? == true titles but draft attribute false" do
+ let(:mr_ids) { merge_requests.all.collect(&:id) }
+
before do
draft_prefixes.each do |prefix|
(1..4).each do |n|
@@ -37,11 +39,16 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests d
it "updates all open draft merge request's draft field to true" do
mr_count = merge_requests.all.count
- mr_ids = merge_requests.all.collect(&:id)
expect { subject.perform(mr_ids.first, mr_ids.last) }
.to change { MergeRequest.where(draft: false).count }
.from(mr_count).to(mr_count - draft_prefixes.length)
end
+
+ it "marks successful slices as completed" do
+ expect(subject).to receive(:mark_job_as_succeeded).with(mr_ids.first, mr_ids.last)
+
+ subject.perform(mr_ids.first, mr_ids.last)
+ end
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb b/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
new file mode 100644
index 00000000000..8f765a7a536
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsTypeNew do
+ let(:migration) { described_class.new }
+ let(:integrations) { table(:integrations) }
+ let(:namespaced_integrations) { Gitlab::Integrations::StiType.namespaced_integrations }
+
+ before do
+ integrations.connection.execute 'ALTER TABLE integrations DISABLE TRIGGER "trigger_type_new_on_insert"'
+
+ namespaced_integrations.each_with_index do |type, i|
+ integrations.create!(id: i + 1, type: "#{type}Service")
+ end
+
+ integrations.create!(id: namespaced_integrations.size + 1, type: 'LegacyService')
+ ensure
+ integrations.connection.execute 'ALTER TABLE integrations ENABLE TRIGGER "trigger_type_new_on_insert"'
+ end
+
+ it 'backfills `type_new` for the selected records' do
+ # We don't want to mock `Kernel.sleep`, so instead we mock it on the migration
+ # class before it gets forwarded.
+ expect(migration).to receive(:sleep).with(0.05).exactly(5).times
+
+ queries = ActiveRecord::QueryRecorder.new do
+ migration.perform(2, 10, :integrations, :id, 2, 50)
+ end
+
+ expect(queries.count).to be(16)
+ expect(queries.log.grep(/^SELECT/).size).to be(11)
+ expect(queries.log.grep(/^UPDATE/).size).to be(5)
+ expect(queries.log.grep(/^UPDATE/).join.scan(/WHERE .*/)).to eq([
+ 'WHERE integrations.id BETWEEN 2 AND 3',
+ 'WHERE integrations.id BETWEEN 4 AND 5',
+ 'WHERE integrations.id BETWEEN 6 AND 7',
+ 'WHERE integrations.id BETWEEN 8 AND 9',
+ 'WHERE integrations.id BETWEEN 10 AND 10'
+ ])
+
+ expect(integrations.where(id: 2..10).pluck(:type, :type_new)).to contain_exactly(
+ ['AssemblaService', 'Integrations::Assembla'],
+ ['BambooService', 'Integrations::Bamboo'],
+ ['BugzillaService', 'Integrations::Bugzilla'],
+ ['BuildkiteService', 'Integrations::Buildkite'],
+ ['CampfireService', 'Integrations::Campfire'],
+ ['ConfluenceService', 'Integrations::Confluence'],
+ ['CustomIssueTrackerService', 'Integrations::CustomIssueTracker'],
+ ['DatadogService', 'Integrations::Datadog'],
+ ['DiscordService', 'Integrations::Discord']
+ )
+
+ expect(integrations.where.not(id: 2..10)).to all(have_attributes(type_new: nil))
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index dbf74bd9333..d22aa86dbe0 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -304,7 +304,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
end
def blob_at(snippet, path)
- raw_repository(snippet).blob_at('master', path)
+ raw_repository(snippet).blob_at('main', path)
end
def repository_exists?(snippet)
diff --git a/spec/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans_spec.rb b/spec/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans_spec.rb
new file mode 100644
index 00000000000..db822f36c21
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::CopyCiBuildsColumnsToSecurityScans, schema: 20210728174349 do
+ let(:migration) { described_class.new }
+
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:ci_pipelines) { table(:ci_pipelines) }
+ let_it_be(:ci_builds) { table(:ci_builds) }
+ let_it_be(:security_scans) { table(:security_scans) }
+
+ let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
+ let!(:project1) { projects.create!(namespace_id: namespace.id) }
+ let!(:project2) { projects.create!(namespace_id: namespace.id) }
+ let!(:pipeline1) { ci_pipelines.create!(status: "success") }
+ let!(:pipeline2) { ci_pipelines.create!(status: "success") }
+
+ let!(:build1) { ci_builds.create!(commit_id: pipeline1.id, type: 'Ci::Build', project_id: project1.id) }
+ let!(:build2) { ci_builds.create!(commit_id: pipeline2.id, type: 'Ci::Build', project_id: project2.id) }
+ let!(:build3) { ci_builds.create!(commit_id: pipeline1.id, type: 'Ci::Build', project_id: project1.id) }
+
+ let!(:scan1) { security_scans.create!(build_id: build1.id, scan_type: 1) }
+ let!(:scan2) { security_scans.create!(build_id: build2.id, scan_type: 1) }
+ let!(:scan3) { security_scans.create!(build_id: build3.id, scan_type: 1) }
+
+ subject { migration.perform(scan1.id, scan2.id) }
+
+ before do
+ stub_const("#{described_class}::UPDATE_BATCH_SIZE", 2)
+ end
+
+ it 'copies `project_id`, `commit_id` from `ci_builds` to `security_scans`', :aggregate_failures do
+ expect(migration).to receive(:mark_job_as_succeeded).with(scan1.id, scan2.id)
+
+ subject
+
+ scan1.reload
+ expect(scan1.project_id).to eq(project1.id)
+ expect(scan1.pipeline_id).to eq(pipeline1.id)
+
+ scan2.reload
+ expect(scan2.project_id).to eq(project2.id)
+ expect(scan2.pipeline_id).to eq(pipeline2.id)
+
+ scan3.reload
+ expect(scan3.project_id).to be_nil
+ expect(scan3.pipeline_id).to be_nil
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
index 70906961641..30908145782 100644
--- a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
+++ b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
@@ -13,12 +13,13 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
let(:vulnerabilities) { table(:vulnerabilities) }
let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+
let(:vulnerability_identifier) do
vulnerability_identifiers.create!(
project_id: project.id,
external_type: 'uuid-v5',
external_id: 'uuid-v5',
- fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ fingerprint: Gitlab::Database::ShaAttribute.serialize('7e394d1b1eb461a7406d7b1e08f057a1cf11287a'),
name: 'Identifier for UUIDv5')
end
@@ -27,7 +28,7 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
project_id: project.id,
external_type: 'uuid-v4',
external_id: 'uuid-v4',
- fingerprint: '772da93d34a1ba010bcb5efa9fb6f8e01bafcc89',
+ fingerprint: Gitlab::Database::ShaAttribute.serialize('772da93d34a1ba010bcb5efa9fb6f8e01bafcc89'),
name: 'Identifier for UUIDv4')
end
@@ -59,7 +60,7 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
scanner_id: different_scanner.id,
primary_identifier_id: different_vulnerability_identifier.id,
report_type: 0, # "sast"
- location_fingerprint: "fa18f432f1d56675f4098d318739c3cd5b14eb3e",
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize("fa18f432f1d56675f4098d318739c3cd5b14eb3e"),
uuid: known_uuid_v4
)
end
@@ -91,7 +92,7 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
scanner_id: scanner.id,
primary_identifier_id: vulnerability_identifier.id,
report_type: 0, # "sast"
- location_fingerprint: "838574be0210968bf6b9f569df9c2576242cbf0a",
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize("838574be0210968bf6b9f569df9c2576242cbf0a"),
uuid: known_uuid_v5
)
end
@@ -115,7 +116,7 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
scanner_id: different_scanner.id,
primary_identifier_id: different_vulnerability_identifier.id,
report_type: 0, # "sast"
- location_fingerprint: "fa18f432f1d56675f4098d318739c3cd5b14eb3e",
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize("fa18f432f1d56675f4098d318739c3cd5b14eb3e"),
uuid: known_uuid_v4
)
diff --git a/spec/lib/gitlab/background_migration_spec.rb b/spec/lib/gitlab/background_migration_spec.rb
index 5b20572578c..f32e6891716 100644
--- a/spec/lib/gitlab/background_migration_spec.rb
+++ b/spec/lib/gitlab/background_migration_spec.rb
@@ -13,7 +13,11 @@ RSpec.describe Gitlab::BackgroundMigration do
describe '.steal' do
context 'when there are enqueued jobs present' do
let(:queue) do
- [double(args: ['Foo', [10, 20]], queue: described_class.queue)]
+ [
+ double(args: ['Foo', [10, 20]], klass: 'BackgroundMigrationWorker'),
+ double(args: ['Bar', [20, 30]], klass: 'BackgroundMigrationWorker'),
+ double(args: ['Foo', [20, 30]], klass: 'MergeWorker')
+ ]
end
before do
@@ -45,7 +49,7 @@ RSpec.describe Gitlab::BackgroundMigration do
expect(queue[0]).not_to receive(:delete)
- described_class.steal('Bar')
+ described_class.steal('Baz')
end
context 'when a custom predicate is given' do
@@ -72,8 +76,8 @@ RSpec.describe Gitlab::BackgroundMigration do
let(:migration) { spy(:migration) }
let(:queue) do
- [double(args: ['Foo', [10, 20]], queue: described_class.queue),
- double(args: ['Foo', [20, 30]], queue: described_class.queue)]
+ [double(args: ['Foo', [10, 20]], klass: 'BackgroundMigrationWorker'),
+ double(args: ['Foo', [20, 30]], klass: 'BackgroundMigrationWorker')]
end
before do
@@ -128,11 +132,11 @@ RSpec.describe Gitlab::BackgroundMigration do
context 'when retry_dead_jobs is true', :redis do
let(:retry_queue) do
- [double(args: ['Object', [3]], queue: described_class.queue, delete: true)]
+ [double(args: ['Object', [3]], klass: 'BackgroundMigrationWorker', delete: true)]
end
let(:dead_queue) do
- [double(args: ['Object', [4]], queue: described_class.queue, delete: true)]
+ [double(args: ['Object', [4]], klass: 'BackgroundMigrationWorker', delete: true)]
end
before do
@@ -187,20 +191,22 @@ RSpec.describe Gitlab::BackgroundMigration do
describe '.remaining', :redis do
context 'when there are jobs remaining' do
- let(:queue) { Array.new(12) }
-
before do
- allow(Sidekiq::Queue).to receive(:new)
- .with(described_class.queue)
- .and_return(Array.new(12))
-
Sidekiq::Testing.disable! do
- BackgroundMigrationWorker.perform_in(10.minutes, 'Foo')
+ MergeWorker.perform_async('Foo')
+ MergeWorker.perform_in(10.minutes, 'Foo')
+
+ 5.times do
+ BackgroundMigrationWorker.perform_async('Foo')
+ end
+ 3.times do
+ BackgroundMigrationWorker.perform_in(10.minutes, 'Foo')
+ end
end
end
it 'returns the enqueued jobs plus the scheduled jobs' do
- expect(described_class.remaining).to eq(13)
+ expect(described_class.remaining).to eq(8)
end
end
@@ -211,16 +217,13 @@ RSpec.describe Gitlab::BackgroundMigration do
end
end
- describe '.exists?' do
+ describe '.exists?', :redis do
context 'when there are enqueued jobs present' do
- let(:queue) do
- [double(args: ['Foo', [10, 20]], queue: described_class.queue)]
- end
-
before do
- allow(Sidekiq::Queue).to receive(:new)
- .with(described_class.queue)
- .and_return(queue)
+ Sidekiq::Testing.disable! do
+ MergeWorker.perform_async('Bar')
+ BackgroundMigrationWorker.perform_async('Foo')
+ end
end
it 'returns true if specific job exists' do
@@ -232,19 +235,14 @@ RSpec.describe Gitlab::BackgroundMigration do
end
end
- context 'when there are scheduled jobs present', :redis do
+ context 'when there are scheduled jobs present' do
before do
Sidekiq::Testing.disable! do
+ MergeWorker.perform_in(10.minutes, 'Bar')
BackgroundMigrationWorker.perform_in(10.minutes, 'Foo')
-
- expect(Sidekiq::ScheduledSet.new).to be_one
end
end
- after do
- Sidekiq::ScheduledSet.new.clear
- end
-
it 'returns true if specific job exists' do
expect(described_class.exists?('Foo')).to eq(true)
end
@@ -257,7 +255,10 @@ RSpec.describe Gitlab::BackgroundMigration do
describe '.dead_jobs?' do
let(:queue) do
- [double(args: ['Foo', [10, 20]], queue: described_class.queue)]
+ [
+ double(args: ['Foo', [10, 20]], klass: 'BackgroundMigrationWorker'),
+ double(args: ['Bar'], klass: 'MergeWorker')
+ ]
end
context 'when there are dead jobs present' do
@@ -277,7 +278,10 @@ RSpec.describe Gitlab::BackgroundMigration do
describe '.retrying_jobs?' do
let(:queue) do
- [double(args: ['Foo', [10, 20]], queue: described_class.queue)]
+ [
+ double(args: ['Foo', [10, 20]], klass: 'BackgroundMigrationWorker'),
+ double(args: ['Bar'], klass: 'MergeWorker')
+ ]
end
context 'when there are dead jobs present' do
diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
index c9ad78ec760..4e4d921d67f 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
@@ -32,8 +32,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
expect(subject).to receive(:delete_temp_branches)
expect(project.repository).to receive(:fetch_as_mirror)
.with('http://bitbucket:test@my-bitbucket',
- refmap: [:heads, :tags, '+refs/pull-requests/*/to:refs/merge-requests/*/head'],
- remote_name: 'bitbucket_server')
+ refmap: [:heads, :tags, '+refs/pull-requests/*/to:refs/merge-requests/*/head'])
subject.execute
end
diff --git a/spec/lib/gitlab/checks/changes_access_spec.rb b/spec/lib/gitlab/checks/changes_access_spec.rb
index a46732f8255..4a74dfcec34 100644
--- a/spec/lib/gitlab/checks/changes_access_spec.rb
+++ b/spec/lib/gitlab/checks/changes_access_spec.rb
@@ -3,40 +3,199 @@
require 'spec_helper'
RSpec.describe Gitlab::Checks::ChangesAccess do
+ include_context 'changes access checks context'
+
+ subject { changes_access }
+
describe '#validate!' do
- include_context 'changes access checks context'
+ shared_examples '#validate!' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
- before do
- allow(project).to receive(:lfs_enabled?).and_return(true)
- end
+ context 'without failed checks' do
+ it "doesn't raise an error" do
+ expect { subject.validate! }.not_to raise_error
+ end
- subject { changes_access }
+ it 'calls lfs checks' do
+ expect_next_instance_of(Gitlab::Checks::LfsCheck) do |instance|
+ expect(instance).to receive(:validate!)
+ end
- context 'without failed checks' do
- it "doesn't raise an error" do
- expect { subject.validate! }.not_to raise_error
+ subject.validate!
+ end
end
- it 'calls lfs checks' do
- expect_next_instance_of(Gitlab::Checks::LfsCheck) do |instance|
- expect(instance).to receive(:validate!)
+ context 'when time limit was reached' do
+ it 'raises a TimeoutError' do
+ logger = Gitlab::Checks::TimedLogger.new(start_time: timeout.ago, timeout: timeout)
+ access = described_class.new(changes,
+ project: project,
+ user_access: user_access,
+ protocol: protocol,
+ logger: logger)
+
+ expect { access.validate! }.to raise_error(Gitlab::Checks::TimedLogger::TimeoutError)
end
+ end
+ end
+
+ context 'with batched commits enabled' do
+ before do
+ stub_feature_flags(changes_batch_commits: true)
+ end
+
+ it_behaves_like '#validate!'
+ end
+
+ context 'with batched commits disabled' do
+ before do
+ stub_feature_flags(changes_batch_commits: false)
+ end
+
+ it_behaves_like '#validate!'
+ end
+ end
+
+ describe '#commits' do
+ it 'calls #new_commits' do
+ expect(project.repository).to receive(:new_commits).and_call_original
+
+ expect(subject.commits).to eq([])
+ end
+
+ context 'when changes contain empty revisions' do
+ let(:changes) { [{ newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
+ let(:expected_commit) { instance_double(Commit) }
+
+ it 'returns only commits with non empty revisions' do
+ expect(project.repository).to receive(:new_commits).with([newrev], { allow_quarantine: true }) { [expected_commit] }
+ expect(subject.commits).to eq([expected_commit])
+ end
+ end
+ end
+
+ describe '#commits_for' do
+ let(:new_commits) { [] }
+ let(:expected_commits) { [] }
+
+ shared_examples 'a listing of new commits' do
+ it 'returns expected commits' do
+ expect(subject).to receive(:commits).and_return(new_commits)
+
+ expect(subject.commits_for(newrev)).to eq(expected_commits)
+ end
+ end
+
+ context 'with no commits' do
+ it_behaves_like 'a listing of new commits'
+ end
+
+ context 'with unrelated commits' do
+ let(:new_commits) { [create_commit('1234', %w[1111 2222])] }
+
+ it_behaves_like 'a listing of new commits'
+ end
+
+ context 'with single related commit' do
+ let(:new_commits) { [create_commit(newrev, %w[1111 2222])] }
+ let(:expected_commits) { new_commits }
- subject.validate!
+ it_behaves_like 'a listing of new commits'
+ end
+
+ context 'with single related and unrelated commit' do
+ let(:new_commits) do
+ [
+ create_commit(newrev, %w[1111 2222]),
+ create_commit('abcd', %w[1111 2222])
+ ]
+ end
+
+ let(:expected_commits) do
+ [create_commit(newrev, %w[1111 2222])]
end
+
+ it_behaves_like 'a listing of new commits'
end
- context 'when time limit was reached' do
- it 'raises a TimeoutError' do
- logger = Gitlab::Checks::TimedLogger.new(start_time: timeout.ago, timeout: timeout)
- access = described_class.new(changes,
- project: project,
- user_access: user_access,
- protocol: protocol,
- logger: logger)
+ context 'with multiple related commits' do
+ let(:new_commits) do
+ [
+ create_commit(newrev, %w[1111]),
+ create_commit('1111', %w[2222]),
+ create_commit('abcd', [])
+ ]
+ end
- expect { access.validate! }.to raise_error(Gitlab::Checks::TimedLogger::TimeoutError)
+ let(:expected_commits) do
+ [
+ create_commit(newrev, %w[1111]),
+ create_commit('1111', %w[2222])
+ ]
end
+
+ it_behaves_like 'a listing of new commits'
end
+
+ context 'with merge commits' do
+ let(:new_commits) do
+ [
+ create_commit(newrev, %w[1111 2222 3333]),
+ create_commit('1111', []),
+ create_commit('3333', %w[4444]),
+ create_commit('4444', [])
+ ]
+ end
+
+ let(:expected_commits) do
+ [
+ create_commit(newrev, %w[1111 2222 3333]),
+ create_commit('1111', []),
+ create_commit('3333', %w[4444]),
+ create_commit('4444', [])
+ ]
+ end
+
+ it_behaves_like 'a listing of new commits'
+ end
+
+ context 'with criss-cross merges' do
+ let(:new_commits) do
+ [
+ create_commit(newrev, %w[a1 b1]),
+ create_commit('a1', %w[a2 b2]),
+ create_commit('a2', %w[a3 b3]),
+ create_commit('a3', %w[c]),
+ create_commit('b1', %w[b2 a2]),
+ create_commit('b2', %w[b3 a3]),
+ create_commit('b3', %w[c]),
+ create_commit('c', [])
+ ]
+ end
+
+ let(:expected_commits) do
+ [
+ create_commit(newrev, %w[a1 b1]),
+ create_commit('a1', %w[a2 b2]),
+ create_commit('b1', %w[b2 a2]),
+ create_commit('a2', %w[a3 b3]),
+ create_commit('b2', %w[b3 a3]),
+ create_commit('a3', %w[c]),
+ create_commit('b3', %w[c]),
+ create_commit('c', [])
+ ]
+ end
+
+ it_behaves_like 'a listing of new commits'
+ end
+ end
+
+ def create_commit(id, parent_ids)
+ Gitlab::Git::Commit.new(project.repository, {
+ id: id,
+ parent_ids: parent_ids
+ })
end
end
diff --git a/spec/lib/gitlab/checks/matching_merge_request_spec.rb b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
index feda488a936..2e562a5a350 100644
--- a/spec/lib/gitlab/checks/matching_merge_request_spec.rb
+++ b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
@@ -49,12 +49,11 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest do
end
end
- context 'with load balancing enabled', :request_store, :redis do
+ context 'with load balancing enabled', :db_load_balancing do
let(:session) { ::Gitlab::Database::LoadBalancing::Session.current }
let(:all_caught_up) { true }
before do
- expect(::Gitlab::Database::LoadBalancing).to receive(:enable?).at_least(:once).and_return(true)
allow(::Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?).and_return(all_caught_up)
expect(::Gitlab::Database::LoadBalancing::Sticking).to receive(:select_valid_host).with(:project, project.id).and_call_original
diff --git a/spec/lib/gitlab/checks/single_change_access_spec.rb b/spec/lib/gitlab/checks/single_change_access_spec.rb
index 8b235005b3e..e81e4951539 100644
--- a/spec/lib/gitlab/checks/single_change_access_spec.rb
+++ b/spec/lib/gitlab/checks/single_change_access_spec.rb
@@ -58,5 +58,52 @@ RSpec.describe Gitlab::Checks::SingleChangeAccess do
expect { access.validate! }.to raise_error(Gitlab::Checks::TimedLogger::TimeoutError)
end
end
+
+ describe '#commits' do
+ let(:expected_commits) { [Gitlab::Git::Commit.new(project.repository, { id: "1234" })] }
+
+ let(:access) do
+ described_class.new(changes,
+ project: project,
+ user_access: user_access,
+ protocol: protocol,
+ logger: logger,
+ commits: provided_commits)
+ end
+
+ shared_examples '#commits' do
+ it 'returns expected commits' do
+ expect(access.commits).to eq(expected_commits)
+ end
+
+ it 'returns expected commits on repeated calls' do
+ expect(access.commits).to eq(expected_commits)
+ expect(access.commits).to eq(expected_commits)
+ end
+ end
+
+ context 'with provided commits' do
+ let(:provided_commits) { expected_commits }
+
+ before do
+ expect(project.repository).not_to receive(:new_commits)
+ end
+
+ it_behaves_like '#commits'
+ end
+
+ context 'without provided commits' do
+ let(:provided_commits) { nil }
+
+ before do
+ expect(project.repository)
+ .to receive(:new_commits)
+ .once
+ .and_return(expected_commits)
+ end
+
+ it_behaves_like '#commits'
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/ansi2html_spec.rb b/spec/lib/gitlab/ci/ansi2html_spec.rb
index bf1f2bae7da..27c2b005a93 100644
--- a/spec/lib/gitlab/ci/ansi2html_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2html_spec.rb
@@ -150,6 +150,10 @@ RSpec.describe Gitlab::Ci::Ansi2html do
expect(convert_html("\r\n")).to eq('<span><br/></span>')
end
+ it 'replaces invalid UTF-8 data' do
+ expect(convert_html("UTF-8 dashes here: ───\n🐤🐤🐤🐤\xF0\x9F\x90\n")).to eq("<span>UTF-8 dashes here: ───<br/>🐤🐤🐤🐤�<br/></span>")
+ end
+
describe "incremental update" do
shared_examples 'stateable converter' do
let(:pass1_stream) { StringIO.new(pre_text) }
diff --git a/spec/lib/gitlab/ci/build/auto_retry_spec.rb b/spec/lib/gitlab/ci/build/auto_retry_spec.rb
index b107553bbce..e83e1326206 100644
--- a/spec/lib/gitlab/ci/build/auto_retry_spec.rb
+++ b/spec/lib/gitlab/ci/build/auto_retry_spec.rb
@@ -53,24 +53,8 @@ RSpec.describe Gitlab::Ci::Build::AutoRetry do
context 'with retries max config option' do
let(:build) { create(:ci_build, options: { retry: { max: 1 } }) }
- context 'when build_metadata_config is set' do
- before do
- stub_feature_flags(ci_build_metadata_config: true)
- end
-
- it 'returns the number of configured max retries' do
- expect(result).to eq 1
- end
- end
-
- context 'when build_metadata_config is not set' do
- before do
- stub_feature_flags(ci_build_metadata_config: false)
- end
-
- it 'returns the number of configured max retries' do
- expect(result).to eq 1
- end
+ it 'returns the number of configured max retries' do
+ expect(result).to eq 1
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index d294eca7f15..6c9c8fa5df5 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -106,7 +106,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
ignore: false,
stage: 'test',
only: { refs: %w[branches tags] },
- variables: {},
job_variables: {},
root_variables_inheritance: true,
scheduling_type: :stage)
@@ -131,7 +130,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
ignore: false,
stage: 'test',
only: { refs: %w[branches tags] },
- variables: {},
job_variables: {},
root_variables_inheritance: true,
scheduling_type: :stage)
@@ -287,7 +285,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
only: { refs: %w[branches tags] },
parallel: { matrix: [{ 'PROVIDER' => ['aws'], 'STACK' => %w(monitoring app1) },
{ 'PROVIDER' => ['gcp'], 'STACK' => %w(data) }] },
- variables: {},
job_variables: {},
root_variables_inheritance: true,
scheduling_type: :stage
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
new file mode 100644
index 00000000000..b99048e2c18
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do
+ let(:factory) do
+ Gitlab::Config::Entry::Factory.new(described_class)
+ .value(config)
+ end
+
+ subject(:entry) { factory.create! }
+
+ describe '.new' do
+ shared_examples 'an invalid config' do |error_message|
+ it { is_expected.not_to be_valid }
+
+ it 'has errors' do
+ expect(entry.errors).to include(error_message)
+ end
+ end
+
+ context 'when specifying an if: clause' do
+ let(:config) { { if: '$THIS || $THAT' } }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'using a list of multiple expressions' do
+ let(:config) { { if: ['$MY_VAR == "this"', '$YOUR_VAR == "that"'] } }
+
+ it_behaves_like 'an invalid config', /invalid expression syntax/
+ end
+
+ context 'when specifying an invalid if: clause expression' do
+ let(:config) { { if: ['$MY_VAR =='] } }
+
+ it_behaves_like 'an invalid config', /invalid expression syntax/
+ end
+
+ context 'when specifying an if: clause expression with an invalid token' do
+ let(:config) { { if: ['$MY_VAR == 123'] } }
+
+ it_behaves_like 'an invalid config', /invalid expression syntax/
+ end
+
+ context 'when using invalid regex in an if: clause' do
+ let(:config) { { if: ['$MY_VAR =~ /some ( thing/'] } }
+
+ it_behaves_like 'an invalid config', /invalid expression syntax/
+ end
+
+ context 'when using an if: clause with lookahead regex character "?"' do
+ let(:config) { { if: '$CI_COMMIT_REF =~ /^(?!master).+/' } }
+
+ context 'when allow_unsafe_ruby_regexp is disabled' do
+ it_behaves_like 'an invalid config', /invalid expression syntax/
+ end
+ end
+
+ context 'when specifying unknown policy' do
+ let(:config) { { invalid: :something } }
+
+ it_behaves_like 'an invalid config', /unknown keys: invalid/
+ end
+
+ context 'when clause is empty' do
+ let(:config) { {} }
+
+ it_behaves_like 'an invalid config', /can't be blank/
+ end
+
+ context 'when policy strategy does not match' do
+ let(:config) { 'string strategy' }
+
+ it_behaves_like 'an invalid config', /should be a hash/
+ end
+ end
+
+ describe '#value' do
+ subject(:value) { entry.value }
+
+ context 'when specifying an if: clause' do
+ let(:config) { { if: '$THIS || $THAT' } }
+
+ it 'returns the config' do
+ expect(subject).to eq(if: '$THIS || $THAT')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb
new file mode 100644
index 00000000000..c255d6e9dd6
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules do
+ let(:factory) do
+ Gitlab::Config::Entry::Factory.new(described_class)
+ .value(config)
+ end
+
+ subject(:entry) { factory.create! }
+
+ describe '.new' do
+ shared_examples 'a valid config' do
+ it { is_expected.to be_valid }
+
+ context 'when composed' do
+ before do
+ entry.compose!
+ end
+
+ it { is_expected.to be_valid }
+ end
+ end
+
+ shared_examples 'an invalid config' do |error_message|
+ it { is_expected.not_to be_valid }
+
+ it 'has errors' do
+ expect(entry.errors).to include(error_message)
+ end
+ end
+
+ context 'with an "if"' do
+ let(:config) do
+ [{ if: '$THIS == "that"' }]
+ end
+
+ it_behaves_like 'a valid config'
+ end
+
+ context 'with a "changes"' do
+ let(:config) do
+ [{ changes: ['filename.txt'] }]
+ end
+
+ context 'when composed' do
+ before do
+ entry.compose!
+ end
+
+ it_behaves_like 'an invalid config', /contains unknown keys: changes/
+ end
+ end
+
+ context 'with a list of two rules' do
+ let(:config) do
+ [
+ { if: '$THIS == "that"' },
+ { if: '$SKIP' }
+ ]
+ end
+
+ it_behaves_like 'a valid config'
+ end
+
+ context 'without an array' do
+ let(:config) do
+ { if: '$SKIP' }
+ end
+
+ it_behaves_like 'an invalid config', /should be a array/
+ end
+ end
+
+ describe '#value' do
+ subject(:value) { entry.value }
+
+ context 'with an "if"' do
+ let(:config) do
+ [{ if: '$THIS == "that"' }]
+ end
+
+ it { is_expected.to eq(config) }
+ end
+
+ context 'with a list of two rules' do
+ let(:config) do
+ [
+ { if: '$THIS == "that"' },
+ { if: '$SKIP' }
+ ]
+ end
+
+ it { is_expected.to eq(config) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/include_spec.rb b/spec/lib/gitlab/ci/config/entry/include_spec.rb
index 59f0b0e7a48..275cdcddeb0 100644
--- a/spec/lib/gitlab/ci/config/entry/include_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
RSpec.describe ::Gitlab::Ci::Config::Entry::Include do
subject(:include_entry) { described_class.new(config) }
@@ -86,6 +86,22 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Include do
end
end
end
+
+ context 'when using with "rules"' do
+ let(:config) { { local: 'test.yml', rules: [{ if: '$VARIABLE' }] } }
+
+ it { is_expected.to be_valid }
+
+ context 'when rules is not an array of hashes' do
+ let(:config) { { local: 'test.yml', rules: ['$VARIABLE'] } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'has specific error' do
+ expect(include_entry.errors).to include('include rules should be an array of hashes')
+ end
+ end
+ end
end
context 'when value is something else' do
@@ -94,4 +110,26 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Include do
it { is_expected.not_to be_valid }
end
end
+
+ describe '#value' do
+ subject(:value) { include_entry.value }
+
+ context 'when config is a string' do
+ let(:config) { 'test.yml' }
+
+ it { is_expected.to eq('test.yml') }
+ end
+
+ context 'when config is a hash' do
+ let(:config) { { local: 'test.yml' } }
+
+ it { is_expected.to eq(local: 'test.yml') }
+ end
+
+ context 'when config has "rules"' do
+ let(:config) { { local: 'test.yml', rules: [{ if: '$VARIABLE' }] } }
+
+ it { is_expected.to eq(local: 'test.yml', rules: [{ if: '$VARIABLE' }]) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/entry/inherit/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/inherit/variables_spec.rb
index b1a8fbcdbe0..bdb4d25c142 100644
--- a/spec/lib/gitlab/ci/config/entry/inherit/variables_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/inherit/variables_spec.rb
@@ -24,19 +24,4 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Inherit::Variables do
end
end
end
-
- describe '#inherit?' do
- where(:config, :inherit) do
- true | true
- false | false
- %w[A] | true
- %w[B] | false
- end
-
- with_them do
- it do
- expect(subject.inherit?('A')).to eq(inherit)
- end
- end
- end
end
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 1d23ab0c2c7..5b47d3a3922 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -434,20 +434,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
expect(entry.errors).to include 'job dependencies the another-job should be part of needs'
end
end
-
- context 'when stage: is missing' do
- let(:config) do
- {
- script: 'echo',
- needs: ['build-job']
- }
- end
-
- it 'returns error about invalid data' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include 'job config missing required keys: stage'
- end
- end
end
context 'when timeout value is not correct' do
@@ -626,7 +612,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
ignore: false,
after_script: %w[cleanup],
only: { refs: %w[branches tags] },
- variables: {},
job_variables: {},
root_variables_inheritance: true,
scheduling_type: :stage)
diff --git a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
index cb73044b62b..9a2a67389fc 100644
--- a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
@@ -99,7 +99,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Jobs do
only: { refs: %w[branches tags] },
stage: 'test',
trigger: { project: 'my/project' },
- variables: {},
job_variables: {},
root_variables_inheritance: true,
scheduling_type: :stage
@@ -110,7 +109,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Jobs do
only: { refs: %w[branches tags] },
script: ['something'],
stage: 'test',
- variables: {},
job_variables: {},
root_variables_inheritance: true,
scheduling_type: :stage
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index f98a6a869d6..b872f6644a2 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -362,76 +362,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
end
context 'with inheritance' do
- context 'of variables' do
- let(:config) do
- { variables: { A: 'job', B: 'job' } }
- end
-
- before do
- entry.compose!(deps)
- end
-
- context 'with only job variables' do
- it 'does return defined variables' do
- expect(entry.value).to include(
- variables: { 'A' => 'job', 'B' => 'job' },
- job_variables: { 'A' => 'job', 'B' => 'job' },
- root_variables_inheritance: true
- )
- end
- end
-
- context 'when root yaml variables are used' do
- let(:variables) do
- Gitlab::Ci::Config::Entry::Variables.new(
- { A: 'root', C: 'root', D: 'root' }
- ).value
- end
-
- it 'does return job and root variables' do
- expect(entry.value).to include(
- variables: { 'A' => 'job', 'B' => 'job', 'C' => 'root', 'D' => 'root' },
- job_variables: { 'A' => 'job', 'B' => 'job' },
- root_variables_inheritance: true
- )
- end
-
- context 'when inherit of defaults is disabled' do
- let(:config) do
- {
- variables: { A: 'job', B: 'job' },
- inherit: { variables: false }
- }
- end
-
- it 'does return job and root variables' do
- expect(entry.value).to include(
- variables: { 'A' => 'job', 'B' => 'job' },
- job_variables: { 'A' => 'job', 'B' => 'job' },
- root_variables_inheritance: false
- )
- end
- end
-
- context 'when inherit of only specific variable is enabled' do
- let(:config) do
- {
- variables: { A: 'job', B: 'job' },
- inherit: { variables: ['D'] }
- }
- end
-
- it 'does return job and root variables' do
- expect(entry.value).to include(
- variables: { 'A' => 'job', 'B' => 'job', 'D' => 'root' },
- job_variables: { 'A' => 'job', 'B' => 'job' },
- root_variables_inheritance: ['D']
- )
- end
- end
- end
- end
-
context 'of default:tags' do
using RSpec::Parameterized::TableSyntax
@@ -493,7 +423,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
name: :rspec,
stage: 'test',
only: { refs: %w[branches tags] },
- variables: {},
job_variables: {},
root_variables_inheritance: true
)
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 31e3545e8d8..d862fbf5b78 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -132,7 +132,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
- variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
job_variables: {},
root_variables_inheritance: true,
ignore: false,
@@ -148,7 +147,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
- variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
job_variables: {},
root_variables_inheritance: true,
ignore: false,
@@ -166,7 +164,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
cache: [{ key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' }],
only: { refs: %w(branches tags) },
- variables: { 'VAR' => 'job', 'VAR2' => 'val 2' },
job_variables: { 'VAR' => 'job' },
root_variables_inheritance: true,
after_script: [],
@@ -214,7 +211,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
- variables: { 'VAR' => 'root' },
job_variables: {},
root_variables_inheritance: true,
ignore: false,
@@ -228,7 +224,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
- variables: { 'VAR' => 'job' },
job_variables: { 'VAR' => 'job' },
root_variables_inheritance: true,
ignore: false,
diff --git a/spec/lib/gitlab/ci/config/entry/rules_spec.rb b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
index 7d26365e7b3..91252378541 100644
--- a/spec/lib/gitlab/ci/config/entry/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
@@ -17,6 +17,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
describe '.new' do
subject { entry }
+ before do
+ subject.compose!
+ end
+
context 'with a list of rule rule' do
let(:config) do
[{ if: '$THIS == "that"', when: 'never' }]
@@ -24,14 +28,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
it { is_expected.to be_a(described_class) }
it { is_expected.to be_valid }
-
- context 'when composed' do
- before do
- subject.compose!
- end
-
- it { is_expected.to be_valid }
- end
end
context 'with a list of two rules' do
@@ -42,21 +38,34 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
]
end
- it { is_expected.to be_a(described_class) }
it { is_expected.to be_valid }
+ end
- context 'when composed' do
- before do
- subject.compose!
- end
+ context 'with a single rule object' do
+ let(:config) do
+ { if: '$SKIP', when: 'never' }
+ end
- it { is_expected.to be_valid }
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'with nested rules' do
+ let(:config) do
+ [
+ { if: '$THIS == "that"', when: 'always' },
+ [{ if: '$SKIP', when: 'never' }]
+ ]
end
+
+ it { is_expected.to be_valid }
end
- context 'with a single rule object' do
+ context 'with rules nested more than one level' do
let(:config) do
- { if: '$SKIP', when: 'never' }
+ [
+ { if: '$THIS == "that"', when: 'always' },
+ [{ if: '$SKIP', when: 'never' }, [{ if: '$THIS == "other"', when: 'always' }]]
+ ]
end
it { is_expected.not_to be_valid }
@@ -90,7 +99,36 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
{ if: '$SKIP', when: 'never' }
end
- it { is_expected.to eq(config) }
+ it { is_expected.to eq([config]) }
+ end
+
+ context 'with nested rules' do
+ let(:first_rule) { { if: '$THIS == "that"', when: 'always' } }
+ let(:second_rule) { { if: '$SKIP', when: 'never' } }
+
+ let(:config) do
+ [
+ first_rule,
+ [second_rule]
+ ]
+ end
+
+ it { is_expected.to contain_exactly(first_rule, second_rule) }
+ end
+
+ context 'with rules nested more than one level' do
+ let(:first_rule) { { if: '$THIS == "that"', when: 'always' } }
+ let(:second_rule) { { if: '$SKIP', when: 'never' } }
+ let(:third_rule) { { if: '$THIS == "other"', when: 'always' } }
+
+ let(:config) do
+ [
+ first_rule,
+ [second_rule, [third_rule]]
+ ]
+ end
+
+ it { is_expected.to contain_exactly(first_rule, second_rule, third_rule) }
end
end
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 88097f3f56a..a471997e43a 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
let(:local_file) { '/lib/gitlab/ci/templates/non-existent-file.yml' }
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
let(:template_file) { 'Auto-DevOps.gitlab-ci.yml' }
- let(:context_params) { { project: project, sha: '123456', user: user, variables: project.predefined_variables.to_runner_variables } }
+ let(:context_params) { { project: project, sha: '123456', user: user, variables: project.predefined_variables } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:file_content) do
@@ -347,15 +347,51 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect(subject.map(&:location)).to contain_exactly('myfolder/file1.yml', 'myfolder/file2.yml')
end
+ end
+
+ context "when 'include' has rules" do
+ let(:values) do
+ { include: [{ remote: remote_url },
+ { local: local_file, rules: [{ if: "$CI_PROJECT_ID == '#{project_id}'" }] }],
+ image: 'ruby:2.7' }
+ end
- context 'when the FF ci_wildcard_file_paths is disabled' do
- before do
- stub_feature_flags(ci_wildcard_file_paths: false)
+ context 'when the rules matches' do
+ let(:project_id) { project.id }
+
+ it 'includes the file' do
+ expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Remote),
+ an_instance_of(Gitlab::Ci::Config::External::File::Local))
end
- it 'cannot find any file returns an error message' do
- expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Local))
- expect(subject[0].errors).to eq(['Local file `myfolder/*.yml` does not exist!'])
+ context 'when the FF ci_include_rules is disabled' do
+ before do
+ stub_feature_flags(ci_include_rules: false)
+ end
+
+ it 'includes the file' do
+ expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Remote),
+ an_instance_of(Gitlab::Ci::Config::External::File::Local))
+ end
+ end
+ end
+
+ context 'when the rules does not match' do
+ let(:project_id) { non_existing_record_id }
+
+ it 'does not include the file' do
+ expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Remote))
+ end
+
+ context 'when the FF ci_include_rules is disabled' do
+ before do
+ stub_feature_flags(ci_include_rules: false)
+ end
+
+ it 'includes the file' do
+ expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Remote),
+ an_instance_of(Gitlab::Ci::Config::External::File::Local))
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb
new file mode 100644
index 00000000000..89ea13d710d
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::External::Rules do
+ let(:rule_hashes) {}
+
+ subject(:rules) { described_class.new(rule_hashes) }
+
+ describe '#evaluate' do
+ let(:context) { double(variables: {}) }
+
+ subject(:result) { rules.evaluate(context).pass? }
+
+ context 'when there is no rule' do
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when there is a rule' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] }
+
+ context 'when the rule matches' do
+ let(:context) { double(variables: { MY_VAR: 'hello' }) }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when the rule does not match' do
+ let(:context) { double(variables: { MY_VAR: 'invalid' }) }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb b/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
index e5f0341c5fe..a29471706cc 100644
--- a/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
@@ -50,10 +50,6 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
name: 'test: [aws, app1]',
instance: 1,
parallel: { total: 4 },
- variables: {
- 'PROVIDER' => 'aws',
- 'STACK' => 'app1'
- },
job_variables: {
'PROVIDER' => 'aws',
'STACK' => 'app1'
@@ -63,10 +59,6 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
name: 'test: [aws, app2]',
instance: 2,
parallel: { total: 4 },
- variables: {
- 'PROVIDER' => 'aws',
- 'STACK' => 'app2'
- },
job_variables: {
'PROVIDER' => 'aws',
'STACK' => 'app2'
@@ -76,10 +68,6 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
name: 'test: [ovh, app]',
instance: 3,
parallel: { total: 4 },
- variables: {
- 'PROVIDER' => 'ovh',
- 'STACK' => 'app'
- },
job_variables: {
'PROVIDER' => 'ovh',
'STACK' => 'app'
@@ -89,10 +77,6 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
name: 'test: [gcp, app]',
instance: 4,
parallel: { total: 4 },
- variables: {
- 'PROVIDER' => 'gcp',
- 'STACK' => 'app'
- },
job_variables: {
'PROVIDER' => 'gcp',
'STACK' => 'app'
diff --git a/spec/lib/gitlab/ci/config/normalizer_spec.rb b/spec/lib/gitlab/ci/config/normalizer_spec.rb
index 4c19657413c..354392eb42e 100644
--- a/spec/lib/gitlab/ci/config/normalizer_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Config::Normalizer do
let(:job_name) { :rspec }
- let(:job_config) { { script: 'rspec', parallel: parallel_config, name: 'rspec', variables: variables_config } }
+ let(:job_config) { { script: 'rspec', parallel: parallel_config, name: 'rspec', job_variables: variables_config } }
let(:config) { { job_name => job_config } }
describe '.normalize_jobs' do
@@ -202,21 +202,21 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
it 'sets job variables', :aggregate_failures do
expect(subject.values[0]).to match(
- a_hash_including(variables: { VAR_1: 'A', VAR_2: 'B', USER_VARIABLE: 'user value' })
+ a_hash_including(job_variables: { VAR_1: 'A', VAR_2: 'B', USER_VARIABLE: 'user value' })
)
expect(subject.values[1]).to match(
- a_hash_including(variables: { VAR_1: 'A', VAR_2: 'C', USER_VARIABLE: 'user value' })
+ a_hash_including(job_variables: { VAR_1: 'A', VAR_2: 'C', USER_VARIABLE: 'user value' })
)
end
it 'parallelizes jobs with original config' do
configs = subject.values.map do |config|
- config.except(:name, :instance, :variables)
+ config.except(:name, :instance, :job_variables)
end
original_config = config[job_name]
- .except(:name, :variables)
+ .except(:name, :job_variables)
.deep_merge(parallel: { total: 2 })
expect(configs).to all(match(a_hash_including(original_config)))
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index 45ce4cac6c4..3ec4519748f 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -286,7 +286,9 @@ RSpec.describe Gitlab::Ci::Config do
end
context "when using 'include' directive" do
- let(:project) { create(:project, :repository) }
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :repository, group: group) }
+ let(:main_project) { create(:project, :repository, :public, group: group) }
let(:remote_location) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
let(:local_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' }
@@ -317,7 +319,9 @@ RSpec.describe Gitlab::Ci::Config do
include:
- #{local_location}
- #{remote_location}
-
+ - project: '$MAIN_PROJECT'
+ ref: '$REF'
+ file: '$FILENAME'
image: ruby:2.7
HEREDOC
end
@@ -331,6 +335,26 @@ RSpec.describe Gitlab::Ci::Config do
allow(project.repository)
.to receive(:blob_data_at).and_return(local_file_content)
+
+ main_project.repository.create_file(
+ main_project.creator,
+ '.gitlab-ci.yml',
+ local_file_content,
+ message: 'Add .gitlab-ci.yml',
+ branch_name: 'master'
+ )
+
+ main_project.repository.create_file(
+ main_project.creator,
+ '.another-ci-file.yml',
+ local_file_content,
+ message: 'Add .another-ci-file.yml',
+ branch_name: 'master'
+ )
+
+ create(:ci_variable, project: project, key: "REF", value: "HEAD")
+ create(:ci_group_variable, group: group, key: "FILENAME", value: ".gitlab-ci.yml")
+ create(:ci_instance_variable, key: 'MAIN_PROJECT', value: main_project.full_path)
end
context "when gitlab_ci_yml has valid 'include' defined" do
@@ -344,6 +368,38 @@ RSpec.describe Gitlab::Ci::Config do
expect(config.to_hash).to eq(composed_hash)
end
+
+ context 'handling variables' do
+ it 'contains all project variables' do
+ ref = config.context.variables.find { |v| v[:key] == 'REF' }
+
+ expect(ref[:value]).to eq("HEAD")
+ end
+
+ it 'contains all group variables' do
+ filename = config.context.variables.find { |v| v[:key] == 'FILENAME' }
+
+ expect(filename[:value]).to eq(".gitlab-ci.yml")
+ end
+
+ it 'contains all instance variables' do
+ project = config.context.variables.find { |v| v[:key] == 'MAIN_PROJECT' }
+
+ expect(project[:value]).to eq(main_project.full_path)
+ end
+
+ context 'overriding a group variable at project level' do
+ before do
+ create(:ci_variable, project: project, key: "FILENAME", value: ".another-ci-file.yml")
+ end
+
+ it 'successfully overrides' do
+ filename = config.context.variables.to_hash[:FILENAME]
+
+ expect(filename).to eq('.another-ci-file.yml')
+ end
+ end
+ end
end
context "when gitlab_ci.yml has invalid 'include' defined" do
@@ -667,5 +723,33 @@ RSpec.describe Gitlab::Ci::Config do
expect(config.to_hash).to eq(composed_hash)
end
end
+
+ context "when an 'include' has rules" do
+ let(:gitlab_ci_yml) do
+ <<~HEREDOC
+ include:
+ - local: #{local_location}
+ rules:
+ - if: $CI_PROJECT_ID == "#{project_id}"
+ image: ruby:2.7
+ HEREDOC
+ end
+
+ context 'when the rules condition is satisfied' do
+ let(:project_id) { project.id }
+
+ it 'includes the file' do
+ expect(config.to_hash).to include(local_location_hash)
+ end
+ end
+
+ context 'when the rules condition is not satisfied' do
+ let(:project_id) { non_existing_record_id }
+
+ it 'does not include the file' do
+ expect(config.to_hash).not_to include(local_location_hash)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/lint_spec.rb b/spec/lib/gitlab/ci/lint_spec.rb
index 77f6608eb85..1e433d7854a 100644
--- a/spec/lib/gitlab/ci/lint_spec.rb
+++ b/spec/lib/gitlab/ci/lint_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Lint do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:lint) { described_class.new(project: project, current_user: user) }
@@ -89,6 +89,15 @@ RSpec.describe Gitlab::Ci::Lint do
)
end
+ after do
+ project.repository.delete_file(
+ project.creator,
+ 'another-gitlab-ci.yml',
+ message: 'Remove another-gitlab-ci.yml',
+ branch_name: 'master'
+ )
+ end
+
it 'sets merged_config' do
root_config = YAML.safe_load(content, [Symbol])
included_config = YAML.safe_load(included_content, [Symbol])
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
new file mode 100644
index 00000000000..c6387bf615b
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -0,0 +1,350 @@
+# frozen_string_literal: true
+
+# TODO remove duplication from spec/lib/gitlab/ci/parsers/security/common_spec.rb and ee/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+# See https://gitlab.com/gitlab-org/gitlab/-/issues/336589
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Security::Common do
+ describe '#parse!' do
+ where(vulnerability_finding_signatures_enabled: [true, false])
+ with_them do
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+
+ let(:artifact) { build(:ci_job_artifact, :common_security_report) }
+ let(:report) { Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago) }
+ # The path 'yarn.lock' was initially used by DependencyScanning, it is okay for SAST locations to use it, but this could be made better
+ let(:location) { ::Gitlab::Ci::Reports::Security::Locations::Sast.new(file_path: 'yarn.lock', start_line: 1, end_line: 1) }
+ let(:tracking_data) { nil }
+
+ before do
+ allow_next_instance_of(described_class) do |parser|
+ allow(parser).to receive(:create_location).and_return(location)
+ allow(parser).to receive(:tracking_data).and_return(tracking_data)
+ end
+
+ artifact.each_blob { |blob| described_class.parse!(blob, report, vulnerability_finding_signatures_enabled) }
+ end
+
+ describe 'schema validation' do
+ let(:validator_class) { Gitlab::Ci::Parsers::Security::Validators::SchemaValidator }
+ let(:parser) { described_class.new('{}', report, vulnerability_finding_signatures_enabled, validate: validate) }
+
+ subject(:parse_report) { parser.parse! }
+
+ before do
+ allow(validator_class).to receive(:new).and_call_original
+ end
+
+ context 'when the validate flag is set as `false`' do
+ let(:validate) { false }
+
+ it 'does not run the validation logic' do
+ parse_report
+
+ expect(validator_class).not_to have_received(:new)
+ end
+ end
+
+ context 'when the validate flag is set as `true`' do
+ let(:validate) { true }
+ let(:valid?) { false }
+
+ before do
+ allow_next_instance_of(validator_class) do |instance|
+ allow(instance).to receive(:valid?).and_return(valid?)
+ allow(instance).to receive(:errors).and_return(['foo'])
+ end
+
+ allow(parser).to receive_messages(create_scanner: true, create_scan: true)
+ end
+
+ it 'instantiates the validator with correct params' do
+ parse_report
+
+ expect(validator_class).to have_received(:new).with(report.type, {})
+ end
+
+ context 'when the report data is not valid according to the schema' do
+ it 'adds errors to the report' do
+ expect { parse_report }.to change { report.errors }.from([]).to([{ message: 'foo', type: 'Schema' }])
+ end
+
+ it 'does not try to create report entities' do
+ parse_report
+
+ expect(parser).not_to have_received(:create_scanner)
+ expect(parser).not_to have_received(:create_scan)
+ end
+ end
+
+ context 'when the report data is valid according to the schema' do
+ let(:valid?) { true }
+
+ it 'does not add errors to the report' do
+ expect { parse_report }.not_to change { report.errors }.from([])
+ end
+
+ it 'keeps the execution flow as normal' do
+ parse_report
+
+ expect(parser).to have_received(:create_scanner)
+ expect(parser).to have_received(:create_scan)
+ end
+ end
+ end
+ end
+
+ describe 'parsing finding.name' do
+ let(:artifact) { build(:ci_job_artifact, :common_security_report_with_blank_names) }
+
+ context 'when message is provided' do
+ it 'sets message from the report as a finding name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
+ expected_name = Gitlab::Json.parse(finding.raw_metadata)['message']
+
+ expect(finding.name).to eq(expected_name)
+ end
+ end
+
+ context 'when message is not provided' do
+ context 'and name is provided' do
+ it 'sets name from the report as a name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
+ expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
+
+ expect(finding.name).to eq(expected_name)
+ end
+ end
+
+ context 'and name is not provided' do
+ context 'when CVE identifier exists' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
+ expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
+ end
+ end
+
+ context 'when CWE identifier exists' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
+ expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
+ end
+ end
+
+ context 'when neither CVE nor CWE identifier exist' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
+ expect(finding.name).to eq("other-2017-11429 in yarn.lock")
+ end
+ end
+ end
+ end
+ end
+
+ describe 'parsing finding.details' do
+ context 'when details are provided' do
+ it 'sets details from the report' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
+ expected_details = Gitlab::Json.parse(finding.raw_metadata)['details']
+
+ expect(finding.details).to eq(expected_details)
+ end
+ end
+
+ context 'when details are not provided' do
+ it 'sets empty hash' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
+ expect(finding.details).to eq({})
+ end
+ end
+ end
+
+ describe 'top-level scanner' do
+ it 'is the primary scanner' do
+ expect(report.primary_scanner.external_id).to eq('gemnasium')
+ expect(report.primary_scanner.name).to eq('Gemnasium')
+ expect(report.primary_scanner.vendor).to eq('GitLab')
+ expect(report.primary_scanner.version).to eq('2.18.0')
+ end
+
+ it 'returns nil when report has no scanner' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
+
+ expect(empty_report.primary_scanner).to be_nil
+ end
+ end
+
+ describe 'parsing scanners' do
+ subject(:scanner) { report.findings.first.scanner }
+
+ context 'when vendor is not missing in scanner' do
+ it 'returns scanner with parsed vendor value' do
+ expect(scanner.vendor).to eq('GitLab')
+ end
+ end
+ end
+
+ describe 'parsing scan' do
+ it 'returns scan object for each finding' do
+ scans = report.findings.map(&:scan)
+
+ expect(scans.map(&:status).all?('success')).to be(true)
+ expect(scans.map(&:start_time).all?('placeholder-value')).to be(true)
+ expect(scans.map(&:end_time).all?('placeholder-value')).to be(true)
+ expect(scans.size).to eq(3)
+ expect(scans.first).to be_a(::Gitlab::Ci::Reports::Security::Scan)
+ end
+
+ it 'returns nil when scan is not a hash' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
+
+ expect(empty_report.scan).to be(nil)
+ end
+ end
+
+ describe 'parsing schema version' do
+ it 'parses the version' do
+ expect(report.version).to eq('14.0.2')
+ end
+
+ it 'returns nil when there is no version' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
+
+ expect(empty_report.version).to be_nil
+ end
+ end
+
+ describe 'parsing analyzer' do
+ it 'associates analyzer with report' do
+ expect(report.analyzer.id).to eq('common-analyzer')
+ expect(report.analyzer.name).to eq('Common Analyzer')
+ expect(report.analyzer.version).to eq('2.0.1')
+ expect(report.analyzer.vendor).to eq('Common')
+ end
+
+ it 'returns nil when analyzer data is not available' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
+
+ expect(empty_report.analyzer).to be_nil
+ end
+ end
+
+ describe 'parsing links' do
+ it 'returns links object for each finding', :aggregate_failures do
+ links = report.findings.flat_map(&:links)
+
+ expect(links.map(&:url)).to match_array(['https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1020', 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1030'])
+ expect(links.map(&:name)).to match_array([nil, 'CVE-1030'])
+ expect(links.size).to eq(2)
+ expect(links.first).to be_a(::Gitlab::Ci::Reports::Security::Link)
+ end
+ end
+
+ describe 'setting the uuid' do
+ let(:finding_uuids) { report.findings.map(&:uuid) }
+ let(:uuid_1) do
+ Security::VulnerabilityUUID.generate(
+ report_type: "sast",
+ primary_identifier_fingerprint: report.findings[0].identifiers.first.fingerprint,
+ location_fingerprint: location.fingerprint,
+ project_id: pipeline.project_id
+ )
+ end
+
+ let(:uuid_2) do
+ Security::VulnerabilityUUID.generate(
+ report_type: "sast",
+ primary_identifier_fingerprint: report.findings[1].identifiers.first.fingerprint,
+ location_fingerprint: location.fingerprint,
+ project_id: pipeline.project_id
+ )
+ end
+
+ let(:expected_uuids) { [uuid_1, uuid_2, nil] }
+
+ it 'sets the UUIDv5 for findings', :aggregate_failures do
+ allow_next_instance_of(Gitlab::Ci::Reports::Security::Report) do |report|
+ allow(report).to receive(:type).and_return('sast')
+
+ expect(finding_uuids).to match_array(expected_uuids)
+ end
+ end
+ end
+
+ describe 'parsing tracking' do
+ let(:tracking_data) do
+ {
+ 'type' => 'source',
+ 'items' => [
+ 'signatures' => [
+ { 'algorithm' => 'hash', 'value' => 'hash_value' },
+ { 'algorithm' => 'location', 'value' => 'location_value' },
+ { 'algorithm' => 'scope_offset', 'value' => 'scope_offset_value' }
+ ]
+ ]
+ }
+ end
+
+ context 'with valid tracking information' do
+ it 'creates signatures for each algorithm' do
+ finding = report.findings.first
+ expect(finding.signatures.size).to eq(3)
+ expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location', 'scope_offset'])
+ end
+ end
+
+ context 'with invalid tracking information' do
+ let(:tracking_data) do
+ {
+ 'type' => 'source',
+ 'items' => [
+ 'signatures' => [
+ { 'algorithm' => 'hash', 'value' => 'hash_value' },
+ { 'algorithm' => 'location', 'value' => 'location_value' },
+ { 'algorithm' => 'INVALID', 'value' => 'scope_offset_value' }
+ ]
+ ]
+ }
+ end
+
+ it 'ignores invalid algorithm types' do
+ finding = report.findings.first
+ expect(finding.signatures.size).to eq(2)
+ expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location'])
+ end
+ end
+
+ context 'with valid tracking information and signature values' do
+ it 'creates signatures for each signature algorithm' do
+ finding = report.findings.first
+ expect(finding.signatures.size).to eq(3)
+ expect(finding.signatures.map(&:algorithm_type)).to eq(%w[hash location scope_offset])
+
+ signatures = finding.signatures.index_by(&:algorithm_type)
+ expected_values = tracking_data['items'][0]['signatures'].index_by { |x| x['algorithm'] }
+ expect(signatures['hash'].signature_value).to eq(expected_values['hash']['value'])
+ expect(signatures['location'].signature_value).to eq(expected_values['location']['value'])
+ expect(signatures['scope_offset'].signature_value).to eq(expected_values['scope_offset']['value'])
+ end
+
+ it 'sets the uuid according to the highest priority signature' do
+ finding = report.findings.first
+ highest_signature = finding.signatures.max_by(&:priority)
+
+ identifiers = if vulnerability_finding_signatures_enabled
+ "#{finding.report_type}-#{finding.primary_identifier.fingerprint}-#{highest_signature.signature_hex}-#{report.project_id}"
+ else
+ "#{finding.report_type}-#{finding.primary_identifier.fingerprint}-#{finding.location.fingerprint}-#{report.project_id}"
+ end
+
+ expect(finding.uuid).to eq(Gitlab::UUID.v5(identifiers))
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/security/sast_spec.rb b/spec/lib/gitlab/ci/parsers/security/sast_spec.rb
new file mode 100644
index 00000000000..4bc48f6611a
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/security/sast_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Security::Sast do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#parse!' do
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+
+ let(:created_at) { 2.weeks.ago }
+
+ context "when parsing valid reports" do
+ where(:report_format, :report_version, :scanner_length, :finding_length, :identifier_length, :file_path, :line) do
+ :sast | '14.0.0' | 1 | 5 | 6 | 'groovy/src/main/java/com/gitlab/security_products/tests/App.groovy' | 47
+ :sast_deprecated | '1.2' | 3 | 33 | 17 | 'python/hardcoded/hardcoded-tmp.py' | 1
+ end
+
+ with_them do
+ let(:report) { Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, created_at) }
+ let(:artifact) { create(:ci_job_artifact, report_format) }
+
+ before do
+ artifact.each_blob { |blob| described_class.parse!(blob, report) }
+ end
+
+ it "parses all identifiers and findings" do
+ expect(report.findings.length).to eq(finding_length)
+ expect(report.identifiers.length).to eq(identifier_length)
+ expect(report.scanners.length).to eq(scanner_length)
+ end
+
+ it 'generates expected location' do
+ location = report.findings.first.location
+
+ expect(location).to be_a(::Gitlab::Ci::Reports::Security::Locations::Sast)
+ expect(location).to have_attributes(
+ file_path: file_path,
+ end_line: line,
+ start_line: line
+ )
+ end
+
+ it "generates expected metadata_version" do
+ expect(report.findings.first.metadata_version).to eq(report_version)
+ end
+ end
+ end
+
+ context "when parsing an empty report" do
+ let(:report) { Gitlab::Ci::Reports::Security::Report.new('sast', pipeline, created_at) }
+ let(:blob) { Gitlab::Json.generate({}) }
+
+ it { expect(described_class.parse!(blob, report)).to be_empty }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb b/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb
new file mode 100644
index 00000000000..1d361e16aad
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Security::SecretDetection do
+ describe '#parse!' do
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+
+ let(:created_at) { 2.weeks.ago }
+
+ context "when parsing valid reports" do
+ where(report_format: %i(secret_detection))
+
+ with_them do
+ let(:report) { Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, created_at) }
+ let(:artifact) { create(:ci_job_artifact, report_format) }
+
+ before do
+ artifact.each_blob { |blob| described_class.parse!(blob, report) }
+ end
+
+ it "parses all identifiers and findings" do
+ expect(report.findings.length).to eq(1)
+ expect(report.identifiers.length).to eq(1)
+ expect(report.scanners.length).to eq(1)
+ end
+
+ it 'generates expected location' do
+ location = report.findings.first.location
+
+ expect(location).to be_a(::Gitlab::Ci::Reports::Security::Locations::SecretDetection)
+ expect(location).to have_attributes(
+ file_path: 'aws-key.py',
+ start_line: nil,
+ end_line: nil,
+ class_name: nil,
+ method_name: nil
+ )
+ end
+
+ it "generates expected metadata_version" do
+ expect(report.findings.first.metadata_version).to eq('3.0')
+ end
+ end
+ end
+
+ context "when parsing an empty report" do
+ let(:report) { Gitlab::Ci::Reports::Security::Report.new('secret_detection', pipeline, created_at) }
+ let(:blob) { Gitlab::Json.generate({}) }
+
+ it { expect(described_class.parse!(blob, report)).to be_empty }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
new file mode 100644
index 00000000000..f434ffd12bf
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:report_type, :expected_errors, :valid_data) do
+ :sast | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
+ :secret_detection | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
+ end
+
+ with_them do
+ let(:validator) { described_class.new(report_type, report_data) }
+
+ describe '#valid?' do
+ subject { validator.valid? }
+
+ context 'when given data is invalid according to the schema' do
+ let(:report_data) { {} }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when given data is valid according to the schema' do
+ let(:report_data) { valid_data }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#errors' do
+ let(:report_data) { { 'version' => '10.0.0' } }
+
+ subject { validator.errors }
+
+ it { is_expected.to eq(expected_errors) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
index 2e73043e309..c22a0e23794 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
@@ -295,31 +295,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
end
end
- describe '#dangling_build?' do
- let(:project) { create(:project, :repository) }
- let(:command) { described_class.new(project: project, source: source) }
-
- subject { command.dangling_build? }
-
- context 'when source is :webide' do
- let(:source) { :webide }
-
- it { is_expected.to eq(true) }
- end
-
- context 'when source is :ondemand_dast_scan' do
- let(:source) { :ondemand_dast_scan }
-
- it { is_expected.to eq(true) }
- end
-
- context 'when source something else' do
- let(:source) { :web }
-
- it { is_expected.to eq(false) }
- end
- end
-
describe '#creates_child_pipeline?' do
let(:command) { described_class.new(bridge: bridge) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
index 499dc3554a3..1aa104310af 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::Deployments do
end
it 'logs the error' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
instance_of(Gitlab::Ci::Limit::LimitExceededError),
project_id: project.id, plan: namespace.actual_plan_name
)
diff --git a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
index cc4aaffb0a4..83d47ae6819 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Sequence do
let_it_be(:user) { create(:user) }
let(:pipeline) { build_stubbed(:ci_pipeline) }
- let(:command) { Gitlab::Ci::Pipeline::Chain::Command.new }
+ let(:command) { Gitlab::Ci::Pipeline::Chain::Command.new(project: project) }
let(:first_step) { spy('first step') }
let(:second_step) { spy('second step') }
let(:sequence) { [first_step, second_step] }
@@ -71,5 +71,20 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Sequence do
expect(histogram).to have_received(:observe)
.with({ source: 'push' }, 0)
end
+
+ it 'records active jobs by pipeline plan in a histogram' do
+ allow(command.metrics)
+ .to receive(:active_jobs_histogram)
+ .and_return(histogram)
+
+ pipeline = create(:ci_pipeline, project: project, status: :running)
+ create(:ci_build, :finished, project: project, pipeline: pipeline)
+ create(:ci_build, :failed, project: project, pipeline: pipeline)
+ create(:ci_build, :running, project: project, pipeline: pipeline)
+ subject.build!
+
+ expect(histogram).to have_received(:observe)
+ .with(hash_including(plan: project.actual_plan_name), 3)
+ end
end
end
diff --git a/spec/lib/gitlab/ci/reports/security/aggregated_report_spec.rb b/spec/lib/gitlab/ci/reports/security/aggregated_report_spec.rb
new file mode 100644
index 00000000000..c56177a6453
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/aggregated_report_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::AggregatedReport do
+ subject { described_class.new(reports, findings) }
+
+ let(:reports) { build_list(:ci_reports_security_report, 1) }
+ let(:findings) { build_list(:ci_reports_security_finding, 1) }
+
+ describe '#created_at' do
+ context 'no reports' do
+ let(:reports) { [] }
+
+ it 'has no created date' do
+ expect(subject.created_at).to be_nil
+ end
+ end
+
+ context 'report with no created date' do
+ let(:reports) { build_list(:ci_reports_security_report, 1, created_at: nil) }
+
+ it 'has no created date' do
+ expect(subject.created_at).to be_nil
+ end
+ end
+
+ context 'has reports' do
+ let(:a_long_time_ago) { 2.months.ago }
+ let(:a_while_ago) { 2.weeks.ago }
+ let(:yesterday) { 1.day.ago }
+
+ let(:reports) do
+ [build(:ci_reports_security_report, created_at: a_while_ago),
+ build(:ci_reports_security_report, created_at: a_long_time_ago),
+ build(:ci_reports_security_report, created_at: nil),
+ build(:ci_reports_security_report, created_at: yesterday)]
+ end
+
+ it 'has oldest created date' do
+ expect(subject.created_at).to eq(a_long_time_ago)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/finding_key_spec.rb b/spec/lib/gitlab/ci/reports/security/finding_key_spec.rb
new file mode 100644
index 00000000000..784c1183320
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/finding_key_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::FindingKey do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#==' do
+ where(:location_fp_1, :location_fp_2, :identifier_fp_1, :identifier_fp_2, :equals?) do
+ nil | 'different location fp' | 'identifier fp' | 'different identifier fp' | false
+ 'location fp' | nil | 'identifier fp' | 'different identifier fp' | false
+ 'location fp' | 'different location fp' | nil | 'different identifier fp' | false
+ 'location fp' | 'different location fp' | 'identifier fp' | nil | false
+ nil | nil | 'identifier fp' | 'identifier fp' | false
+ 'location fp' | 'location fp' | nil | nil | false
+ nil | nil | nil | nil | false
+ 'location fp' | 'different location fp' | 'identifier fp' | 'different identifier fp' | false
+ 'location fp' | 'different location fp' | 'identifier fp' | 'identifier fp' | false
+ 'location fp' | 'location fp' | 'identifier fp' | 'different identifier fp' | false
+ 'location fp' | 'location fp' | 'identifier fp' | 'identifier fp' | true
+ end
+
+ with_them do
+ let(:finding_key_1) do
+ build(:ci_reports_security_finding_key,
+ location_fingerprint: location_fp_1,
+ identifier_fingerprint: identifier_fp_1)
+ end
+
+ let(:finding_key_2) do
+ build(:ci_reports_security_finding_key,
+ location_fingerprint: location_fp_2,
+ identifier_fingerprint: identifier_fp_2)
+ end
+
+ subject { finding_key_1 == finding_key_2 }
+
+ it { is_expected.to be(equals?) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/finding_signature_spec.rb b/spec/lib/gitlab/ci/reports/security/finding_signature_spec.rb
new file mode 100644
index 00000000000..23e6b40a039
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/finding_signature_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::FindingSignature do
+ subject { described_class.new(params.with_indifferent_access) }
+
+ let(:params) do
+ {
+ algorithm_type: 'hash',
+ signature_value: 'SIGNATURE'
+ }
+ end
+
+ describe '#initialize' do
+ context 'when a supported algorithm type is given' do
+ it 'allows itself to be created' do
+ expect(subject.algorithm_type).to eq(params[:algorithm_type])
+ expect(subject.signature_value).to eq(params[:signature_value])
+ end
+
+ describe '#valid?' do
+ it 'returns true' do
+ expect(subject.valid?).to eq(true)
+ end
+ end
+ end
+ end
+
+ describe '#valid?' do
+ context 'when supported algorithm_type is given' do
+ it 'is valid' do
+ expect(subject.valid?).to eq(true)
+ end
+ end
+
+ context 'when an unsupported algorithm_type is given' do
+ let(:params) do
+ {
+ algorithm_type: 'INVALID',
+ signature_value: 'SIGNATURE'
+ }
+ end
+
+ it 'is not valid' do
+ expect(subject.valid?).to eq(false)
+ end
+ end
+ end
+
+ describe '#to_hash' do
+ it 'returns a hash representation of the signature' do
+ expect(subject.to_hash).to eq(
+ algorithm_type: params[:algorithm_type],
+ signature_sha: Digest::SHA1.digest(params[:signature_value])
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/locations/sast_spec.rb b/spec/lib/gitlab/ci/reports/security/locations/sast_spec.rb
new file mode 100644
index 00000000000..effa7a60400
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/locations/sast_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Locations::Sast do
+ let(:params) do
+ {
+ file_path: 'src/main/App.java',
+ start_line: 29,
+ end_line: 31,
+ class_name: 'com.gitlab.security_products.tests.App',
+ method_name: 'insecureCypher'
+ }
+ end
+
+ let(:mandatory_params) { %i[file_path start_line] }
+ let(:expected_fingerprint) { Digest::SHA1.hexdigest('src/main/App.java:29:31') }
+ let(:expected_fingerprint_path) { 'App.java' }
+
+ it_behaves_like 'vulnerability location'
+end
diff --git a/spec/lib/gitlab/ci/reports/security/locations/secret_detection_spec.rb b/spec/lib/gitlab/ci/reports/security/locations/secret_detection_spec.rb
new file mode 100644
index 00000000000..3b84a548713
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/locations/secret_detection_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Locations::SecretDetection do
+ let(:params) do
+ {
+ file_path: 'src/main/App.java',
+ start_line: 29,
+ end_line: 31,
+ class_name: 'com.gitlab.security_products.tests.App',
+ method_name: 'insecureCypher'
+ }
+ end
+
+ let(:mandatory_params) { %i[file_path start_line] }
+ let(:expected_fingerprint) { Digest::SHA1.hexdigest('src/main/App.java:29:31') }
+ let(:expected_fingerprint_path) { 'App.java' }
+
+ it_behaves_like 'vulnerability location'
+end
diff --git a/spec/lib/gitlab/ci/reports/security/report_spec.rb b/spec/lib/gitlab/ci/reports/security/report_spec.rb
new file mode 100644
index 00000000000..5a85c3f19fc
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/report_spec.rb
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Report do
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+
+ let(:created_at) { 2.weeks.ago }
+
+ subject(:report) { described_class.new('sast', pipeline, created_at) }
+
+ it { expect(report.type).to eq('sast') }
+ it { is_expected.to delegate_method(:project_id).to(:pipeline) }
+
+ describe '#add_scanner' do
+ let(:scanner) { create(:ci_reports_security_scanner, external_id: 'find_sec_bugs') }
+
+ subject { report.add_scanner(scanner) }
+
+ it 'stores given scanner params in the map' do
+ subject
+
+ expect(report.scanners).to eq({ 'find_sec_bugs' => scanner })
+ end
+
+ it 'returns the added scanner' do
+ expect(subject).to eq(scanner)
+ end
+ end
+
+ describe '#add_identifier' do
+ let(:identifier) { create(:ci_reports_security_identifier) }
+
+ subject { report.add_identifier(identifier) }
+
+ it 'stores given identifier params in the map' do
+ subject
+
+ expect(report.identifiers).to eq({ identifier.fingerprint => identifier })
+ end
+
+ it 'returns the added identifier' do
+ expect(subject).to eq(identifier)
+ end
+ end
+
+ describe '#add_finding' do
+ let(:finding) { create(:ci_reports_security_finding) }
+
+ it 'enriches given finding and stores it in the collection' do
+ report.add_finding(finding)
+
+ expect(report.findings).to eq([finding])
+ end
+ end
+
+ describe '#clone_as_blank' do
+ let(:report) do
+ create(
+ :ci_reports_security_report,
+ findings: [create(:ci_reports_security_finding)],
+ scanners: [create(:ci_reports_security_scanner)],
+ identifiers: [create(:ci_reports_security_identifier)]
+ )
+ end
+
+ it 'creates a blank report with copied type and pipeline' do
+ clone = report.clone_as_blank
+
+ expect(clone.type).to eq(report.type)
+ expect(clone.pipeline).to eq(report.pipeline)
+ expect(clone.created_at).to eq(report.created_at)
+ expect(clone.findings).to eq([])
+ expect(clone.scanners).to eq({})
+ expect(clone.identifiers).to eq({})
+ end
+ end
+
+ describe '#replace_with!' do
+ let(:report) do
+ create(
+ :ci_reports_security_report,
+ findings: [create(:ci_reports_security_finding)],
+ scanners: [create(:ci_reports_security_scanner)],
+ identifiers: [create(:ci_reports_security_identifier)]
+ )
+ end
+
+ let(:other_report) do
+ create(
+ :ci_reports_security_report,
+ findings: [create(:ci_reports_security_finding, compare_key: 'other_finding')],
+ scanners: [create(:ci_reports_security_scanner, external_id: 'other_scanner', name: 'Other Scanner')],
+ identifiers: [create(:ci_reports_security_identifier, external_id: 'other_id', name: 'other_scanner')]
+ )
+ end
+
+ before do
+ report.replace_with!(other_report)
+ end
+
+ it 'replaces report contents with other reports contents' do
+ expect(report.findings).to eq(other_report.findings)
+ expect(report.scanners).to eq(other_report.scanners)
+ expect(report.identifiers).to eq(other_report.identifiers)
+ end
+ end
+
+ describe '#merge!' do
+ let(:merged_report) { double('Report') }
+
+ before do
+ merge_reports_service = double('MergeReportsService')
+
+ allow(::Security::MergeReportsService).to receive(:new).and_return(merge_reports_service)
+ allow(merge_reports_service).to receive(:execute).and_return(merged_report)
+ allow(report).to receive(:replace_with!)
+ end
+
+ subject { report.merge!(described_class.new('sast', pipeline, created_at)) }
+
+ it 'invokes the merge with other report and then replaces this report contents by merge result' do
+ subject
+
+ expect(report).to have_received(:replace_with!).with(merged_report)
+ end
+ end
+
+ describe '#primary_scanner' do
+ let(:scanner_1) { create(:ci_reports_security_scanner, external_id: 'external_id_1') }
+ let(:scanner_2) { create(:ci_reports_security_scanner, external_id: 'external_id_2') }
+
+ subject { report.primary_scanner }
+
+ before do
+ report.add_scanner(scanner_1)
+ report.add_scanner(scanner_2)
+ end
+
+ it { is_expected.to eq(scanner_1) }
+ end
+
+ describe '#add_error' do
+ context 'when the message is not given' do
+ it 'adds a new error to report with the generic error message' do
+ expect { report.add_error('foo') }.to change { report.errors }
+ .from([])
+ .to([{ type: 'foo', message: 'An unexpected error happened!' }])
+ end
+ end
+
+ context 'when the message is given' do
+ it 'adds a new error to report' do
+ expect { report.add_error('foo', 'bar') }.to change { report.errors }
+ .from([])
+ .to([{ type: 'foo', message: 'bar' }])
+ end
+ end
+ end
+
+ describe '#errored?' do
+ subject { report.errored? }
+
+ context 'when the report does not have any errors' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when the report has errors' do
+ before do
+ report.add_error('foo', 'bar')
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#primary_scanner_order_to' do
+ let(:scanner_1) { build(:ci_reports_security_scanner) }
+ let(:scanner_2) { build(:ci_reports_security_scanner) }
+ let(:report_1) { described_class.new('sast', pipeline, created_at) }
+ let(:report_2) { described_class.new('sast', pipeline, created_at) }
+
+ subject(:compare_based_on_primary_scanners) { report_1.primary_scanner_order_to(report_2) }
+
+ context 'when the primary scanner of the receiver is nil' do
+ context 'when the primary scanner of the other is nil' do
+ it { is_expected.to be(1) }
+ end
+
+ context 'when the primary scanner of the other is not nil' do
+ before do
+ report_2.add_scanner(scanner_2)
+ end
+
+ it { is_expected.to be(1) }
+ end
+ end
+
+ context 'when the primary scanner of the receiver is not nil' do
+ before do
+ report_1.add_scanner(scanner_1)
+ end
+
+ context 'when the primary scanner of the other is nil' do
+ let(:scanner_2) { nil }
+
+ it { is_expected.to be(-1) }
+ end
+
+ context 'when the primary scanner of the other is not nil' do
+ before do
+ report_2.add_scanner(scanner_2)
+
+ allow(scanner_1).to receive(:<=>).and_return(0)
+ end
+
+ it 'compares two scanners' do
+ expect(compare_based_on_primary_scanners).to be(0)
+ expect(scanner_1).to have_received(:<=>).with(scanner_2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/reports_spec.rb b/spec/lib/gitlab/ci/reports/security/reports_spec.rb
new file mode 100644
index 00000000000..9b1e02f1418
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/reports_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Reports do
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+ let_it_be(:artifact) { create(:ci_job_artifact, :sast) }
+
+ let(:security_reports) { described_class.new(pipeline) }
+
+ describe '#get_report' do
+ subject { security_reports.get_report(report_type, artifact) }
+
+ context 'when report type is sast' do
+ let(:report_type) { 'sast' }
+
+ it { expect(subject.type).to eq('sast') }
+ it { expect(subject.created_at).to eq(artifact.created_at) }
+
+ it 'initializes a new report and returns it' do
+ expect(Gitlab::Ci::Reports::Security::Report).to receive(:new)
+ .with('sast', pipeline, artifact.created_at).and_call_original
+
+ is_expected.to be_a(Gitlab::Ci::Reports::Security::Report)
+ end
+
+ context 'when report type is already allocated' do
+ before do
+ subject
+ end
+
+ it 'does not initialize a new report' do
+ expect(Gitlab::Ci::Reports::Security::Report).not_to receive(:new)
+
+ is_expected.to be_a(Gitlab::Ci::Reports::Security::Report)
+ end
+ end
+ end
+ end
+
+ describe '#findings' do
+ let(:finding_1) { build(:ci_reports_security_finding, severity: 'low') }
+ let(:finding_2) { build(:ci_reports_security_finding, severity: 'high') }
+ let!(:expected_findings) { [finding_1, finding_2] }
+
+ subject { security_reports.findings }
+
+ before do
+ security_reports.get_report('sast', artifact).add_finding(finding_1)
+ security_reports.get_report('dependency_scanning', artifact).add_finding(finding_2)
+ end
+
+ it { is_expected.to match_array(expected_findings) }
+ end
+
+ describe "#violates_default_policy_against?" do
+ let(:high_severity_dast) { build(:ci_reports_security_finding, severity: 'high', report_type: :dast) }
+ let(:vulnerabilities_allowed) { 0 }
+ let(:severity_levels) { %w(critical high) }
+
+ subject { security_reports.violates_default_policy_against?(target_reports, vulnerabilities_allowed, severity_levels) }
+
+ before do
+ security_reports.get_report('sast', artifact).add_finding(high_severity_dast)
+ end
+
+ context 'when the target_reports is `nil`' do
+ let(:target_reports) { nil }
+
+ context 'with severity levels matching the existing vulnerabilities' do
+ it { is_expected.to be(true) }
+ end
+
+ context "without any severity levels matching the existing vulnerabilities" do
+ let(:severity_levels) { %w(critical) }
+
+ it { is_expected.to be(false) }
+ end
+ end
+
+ context 'when the target_reports is not `nil`' do
+ let(:target_reports) { described_class.new(pipeline) }
+
+ context "when a report has a new unsafe vulnerability" do
+ context 'with severity levels matching the existing vulnerabilities' do
+ it { is_expected.to be(true) }
+ end
+
+ it { is_expected.to be(true) }
+
+ context 'with vulnerabilities_allowed higher than the number of new vulnerabilities' do
+ let(:vulnerabilities_allowed) { 10000 }
+
+ it { is_expected.to be(false) }
+ end
+
+ context "without any severity levels matching the existing vulnerabilities" do
+ let(:severity_levels) { %w(critical) }
+
+ it { is_expected.to be(false) }
+ end
+ end
+
+ context "when none of the reports have a new unsafe vulnerability" do
+ before do
+ target_reports.get_report('sast', artifact).add_finding(high_severity_dast)
+ end
+
+ it { is_expected.to be(false) }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb
new file mode 100644
index 00000000000..44e66fd9028
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::VulnerabilityReportsComparer do
+ let(:identifier) { build(:ci_reports_security_identifier) }
+
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:location_param) { build(:ci_reports_security_locations_sast, :dynamic) }
+ let(:vulnerability_params) { vuln_params(project.id, [identifier], confidence: :low, severity: :critical) }
+ let(:base_vulnerability) { build(:ci_reports_security_finding, location: location_param, **vulnerability_params) }
+ let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [base_vulnerability]) }
+
+ let(:head_vulnerability) { build(:ci_reports_security_finding, location: location_param, uuid: base_vulnerability.uuid, **vulnerability_params) }
+ let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: [head_vulnerability]) }
+
+ shared_context 'comparing reports' do
+ let(:vul_params) { vuln_params(project.id, [identifier]) }
+ let(:base_vulnerability) { build(:ci_reports_security_finding, :dynamic, **vul_params) }
+ let(:head_vulnerability) { build(:ci_reports_security_finding, :dynamic, **vul_params) }
+ let(:head_vul_findings) { [head_vulnerability, vuln] }
+ end
+
+ subject { described_class.new(project, base_report, head_report) }
+
+ where(vulnerability_finding_signatures: [true, false])
+
+ with_them do
+ before do
+ stub_licensed_features(vulnerability_finding_signatures: vulnerability_finding_signatures)
+ end
+
+ describe '#base_report_out_of_date' do
+ context 'no base report' do
+ let(:base_report) { build(:ci_reports_security_aggregated_reports, reports: [], findings: []) }
+
+ it 'is not out of date' do
+ expect(subject.base_report_out_of_date).to be false
+ end
+ end
+
+ context 'base report older than one week' do
+ let(:report) { build(:ci_reports_security_report, created_at: 1.week.ago - 60.seconds) }
+ let(:base_report) { build(:ci_reports_security_aggregated_reports, reports: [report]) }
+
+ it 'is out of date' do
+ expect(subject.base_report_out_of_date).to be true
+ end
+ end
+
+ context 'base report less than one week old' do
+ let(:report) { build(:ci_reports_security_report, created_at: 1.week.ago + 60.seconds) }
+ let(:base_report) { build(:ci_reports_security_aggregated_reports, reports: [report]) }
+
+ it 'is not out of date' do
+ expect(subject.base_report_out_of_date).to be false
+ end
+ end
+ end
+
+ describe '#added' do
+ let(:new_location) { build(:ci_reports_security_locations_sast, :dynamic) }
+ let(:vul_params) { vuln_params(project.id, [identifier], confidence: :high) }
+ let(:vuln) { build(:ci_reports_security_finding, severity: Enums::Vulnerability.severity_levels[:critical], location: new_location, **vul_params) }
+ let(:low_vuln) { build(:ci_reports_security_finding, severity: Enums::Vulnerability.severity_levels[:low], location: new_location, **vul_params) }
+
+ context 'with new vulnerability' do
+ let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: [head_vulnerability, vuln]) }
+
+ it 'points to source tree' do
+ expect(subject.added).to eq([vuln])
+ end
+ end
+
+ context 'when comparing reports with different fingerprints' do
+ include_context 'comparing reports'
+
+ let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: head_vul_findings) }
+
+ it 'does not find any overlap' do
+ expect(subject.added).to eq(head_vul_findings)
+ end
+ end
+
+ context 'order' do
+ let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: [head_vulnerability, vuln, low_vuln]) }
+
+ it 'does not change' do
+ expect(subject.added).to eq([vuln, low_vuln])
+ end
+ end
+ end
+
+ describe '#fixed' do
+ let(:vul_params) { vuln_params(project.id, [identifier]) }
+ let(:vuln) { build(:ci_reports_security_finding, :dynamic, **vul_params) }
+ let(:medium_vuln) { build(:ci_reports_security_finding, confidence: ::Enums::Vulnerability.confidence_levels[:high], severity: Enums::Vulnerability.severity_levels[:medium], uuid: vuln.uuid, **vul_params) }
+
+ context 'with fixed vulnerability' do
+ let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [base_vulnerability, vuln]) }
+
+ it 'points to base tree' do
+ expect(subject.fixed).to eq([vuln])
+ end
+ end
+
+ context 'when comparing reports with different fingerprints' do
+ include_context 'comparing reports'
+
+ let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [base_vulnerability, vuln]) }
+
+ it 'does not find any overlap' do
+ expect(subject.fixed).to eq([base_vulnerability, vuln])
+ end
+ end
+
+ context 'order' do
+ let(:vul_findings) { [vuln, medium_vuln] }
+ let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [*vul_findings, base_vulnerability]) }
+
+ it 'does not change' do
+ expect(subject.fixed).to eq(vul_findings)
+ end
+ end
+ end
+
+ describe 'with empty vulnerabilities' do
+ let(:empty_report) { build(:ci_reports_security_aggregated_reports, reports: [], findings: []) }
+
+ it 'returns empty array when reports are not present' do
+ comparer = described_class.new(project, empty_report, empty_report)
+
+ expect(comparer.fixed).to eq([])
+ expect(comparer.added).to eq([])
+ end
+
+ it 'returns added vulnerability when base is empty and head is not empty' do
+ comparer = described_class.new(project, empty_report, head_report)
+
+ expect(comparer.fixed).to eq([])
+ expect(comparer.added).to eq([head_vulnerability])
+ end
+
+ it 'returns fixed vulnerability when head is empty and base is not empty' do
+ comparer = described_class.new(project, base_report, empty_report)
+
+ expect(comparer.fixed).to eq([base_vulnerability])
+ expect(comparer.added).to eq([])
+ end
+ end
+ end
+
+ def vuln_params(project_id, identifiers, confidence: :high, severity: :critical)
+ {
+ project_id: project_id,
+ report_type: :sast,
+ identifiers: identifiers,
+ confidence: ::Enums::Vulnerability.confidence_levels[confidence],
+ severity: ::Enums::Vulnerability.severity_levels[severity]
+ }
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb
index 6bc8e261640..f8df2266689 100644
--- a/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe '5-Minute-Production-App.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_branch) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
index e8aeb93a2ba..ca6f6872f89 100644
--- a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
let(:platform_target) { 'ECS' }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
index 053499344e1..bd701aec8fc 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Jobs/Build.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -47,7 +47,7 @@ RSpec.describe 'Jobs/Build.gitlab-ci.yml' do
context 'on merge request' do
let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
- let(:pipeline) { service.execute(merge_request) }
+ let(:pipeline) { service.execute(merge_request).payload }
it 'has no jobs' do
expect(pipeline).to be_merge_request_event
diff --git a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
index b23457315cc..64243f2d205 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -47,7 +47,7 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
context 'on merge request' do
let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
- let(:pipeline) { service.execute(merge_request) }
+ let(:pipeline) { service.execute(merge_request).payload }
it 'has no jobs' do
expect(pipeline).to be_merge_request_event
diff --git a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
index 1d137ef89e1..d377cf0c735 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -210,7 +210,7 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do
context 'on merge request' do
let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
- let(:pipeline) { service.execute(merge_request) }
+ let(:pipeline) { service.execute(merge_request).payload }
it 'has no jobs' do
expect(pipeline).to be_merge_request_event
diff --git a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
index 7fa8d906d07..db9d7496251 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -47,7 +47,7 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
context 'on merge request' do
let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
- let(:pipeline) { service.execute(merge_request) }
+ let(:pipeline) { service.execute(merge_request).payload }
it 'has no jobs' do
expect(pipeline).to be_merge_request_event
diff --git a/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
index 0811c07e896..4685d843ce0 100644
--- a/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe 'Terraform/Base.latest.gitlab-ci.yml' do
- subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform/Base.latest') }
+RSpec.describe 'Terraform/Base.gitlab-ci.yml' do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform/Base') }
describe 'the created pipeline' do
let(:default_branch) { 'master' }
@@ -11,7 +11,7 @@ RSpec.describe 'Terraform/Base.latest.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/Terraform/base_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Terraform/base_latest_gitlab_ci_yaml_spec.rb
new file mode 100644
index 00000000000..e35f2eabe8e
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/Terraform/base_latest_gitlab_ci_yaml_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Terraform/Base.latest.gitlab-ci.yml' do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform/Base.latest') }
+
+ describe 'the created pipeline' do
+ let(:default_branch) { 'master' }
+ let(:pipeline_branch) { default_branch }
+ let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
+ let(:user) { project.owner }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
+ let(:pipeline) { service.execute!(:push).payload }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ allow(project).to receive(:default_branch).and_return(default_branch)
+ end
+
+ it 'does not create any jobs' do
+ expect(build_names).to be_empty
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
index e53d2f4f975..004261bc617 100644
--- a/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Verify/Load-Performance-Testing.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -64,7 +64,7 @@ RSpec.describe 'Verify/Load-Performance-Testing.gitlab-ci.yml' do
context 'on merge request' do
let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
- let(:pipeline) { service.execute(merge_request) }
+ let(:pipeline) { service.execute(merge_request).payload }
it 'has no jobs' do
expect(pipeline).to be_merge_request_event
diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
index b40b4f5645f..7602309627b 100644
--- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -264,7 +264,7 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: files) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: default_branch ) }
- let(:pipeline) { service.execute(:push) }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb
index 4e5fe622648..3d97b47473d 100644
--- a/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Flutter.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb
index 151880e27a3..14aaf717453 100644
--- a/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Managed-Cluster-Applications.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, namespace: user.namespace, files: { 'README.md' => '' }) }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
let(:default_branch) { project.default_branch_or_main }
let(:pipeline_branch) { default_branch }
diff --git a/spec/lib/gitlab/ci/templates/npm_spec.rb b/spec/lib/gitlab/ci/templates/npm_spec.rb
index 2456c9ae545..ea954690133 100644
--- a/spec/lib/gitlab/ci/templates/npm_spec.rb
+++ b/spec/lib/gitlab/ci/templates/npm_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'npm.gitlab-ci.yml' do
let(:pipeline_tag) { 'v1.2.1' }
let(:pipeline_ref) { pipeline_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref ) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
def create_branch(name:)
diff --git a/spec/lib/gitlab/ci/templates/terraform_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_gitlab_ci_yaml_spec.rb
new file mode 100644
index 00000000000..936cd6ac8aa
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/terraform_gitlab_ci_yaml_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Terraform.gitlab-ci.yml' do
+ before do
+ allow(Gitlab::Template::GitlabCiYmlTemplate).to receive(:excluded_patterns).and_return([])
+ end
+
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform') }
+
+ describe 'the created pipeline' do
+ let(:default_branch) { project.default_branch_or_main }
+ let(:pipeline_branch) { default_branch }
+ let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
+ let(:user) { project.owner }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
+ let(:pipeline) { service.execute!(:push).payload }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ allow(project).to receive(:default_branch).and_return(default_branch)
+ end
+
+ context 'on master branch' do
+ it 'creates init, validate and build jobs', :aggregate_failures do
+ expect(pipeline.errors).to be_empty
+ expect(build_names).to include('init', 'validate', 'build', 'deploy')
+ end
+ end
+
+ context 'outside the master branch' do
+ let(:pipeline_branch) { 'patch-1' }
+
+ before do
+ project.repository.create_branch(pipeline_branch, default_branch)
+ end
+
+ it 'does not creates a deploy and a test job', :aggregate_failures do
+ expect(pipeline.errors).to be_empty
+ expect(build_names).not_to include('deploy')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
index 5ab3035486f..3d1306e82a5 100644
--- a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push) }
+ let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -25,7 +25,8 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
end
context 'on master branch' do
- it 'creates init, validate and build jobs' do
+ it 'creates init, validate and build jobs', :aggregate_failures do
+ expect(pipeline.errors).to be_empty
expect(build_names).to include('init', 'validate', 'build', 'deploy')
end
end
@@ -37,7 +38,8 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
project.repository.create_branch(pipeline_branch, default_branch)
end
- it 'does not creates a deploy and a test job' do
+ it 'does not creates a deploy and a test job', :aggregate_failures do
+ expect(pipeline.errors).to be_empty
expect(build_names).not_to include('deploy')
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 19c2e34a0f0..49a470f9e01 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -42,7 +42,6 @@ module Gitlab
interruptible: true,
allow_failure: false,
when: "on_success",
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -75,7 +74,6 @@ module Gitlab
],
allow_failure: false,
when: 'on_success',
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -115,7 +113,6 @@ module Gitlab
tag_list: %w[A B],
allow_failure: false,
when: "on_success",
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true
})
@@ -163,7 +160,6 @@ module Gitlab
interruptible: true,
allow_failure: false,
when: "on_success",
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -354,7 +350,6 @@ module Gitlab
name: "rspec",
allow_failure: false,
when: "on_success",
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage,
@@ -368,7 +363,6 @@ module Gitlab
name: "prod",
allow_failure: false,
when: "on_success",
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage,
@@ -847,7 +841,6 @@ module Gitlab
},
allow_failure: false,
when: "on_success",
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -882,7 +875,6 @@ module Gitlab
},
allow_failure: false,
when: "on_success",
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -913,7 +905,6 @@ module Gitlab
},
allow_failure: false,
when: "on_success",
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -942,7 +933,6 @@ module Gitlab
},
allow_failure: false,
when: "on_success",
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -955,7 +945,6 @@ module Gitlab
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
let(:build) { subject.builds.first }
- let(:yaml_variables) { build[:yaml_variables] }
let(:job_variables) { build[:job_variables] }
let(:root_variables_inheritance) { build[:root_variables_inheritance] }
@@ -973,84 +962,11 @@ module Gitlab
end
it 'returns global variables' do
- expect(yaml_variables).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
- )
expect(job_variables).to eq([])
expect(root_variables_inheritance).to eq(true)
end
end
- context 'when job and global variables are defined' do
- let(:global_variables) do
- { 'VAR1' => 'global1', 'VAR3' => 'global3', 'VAR4' => 'global4' }
- end
-
- let(:build_variables) do
- { 'VAR1' => 'value1', 'VAR2' => 'value2' }
- end
-
- let(:config) do
- {
- before_script: ['pwd'],
- variables: global_variables,
- rspec: { script: 'rspec', variables: build_variables, inherit: inherit }
- }
- end
-
- context 'when no inheritance is specified' do
- let(:inherit) { }
-
- it 'returns all variables' do
- expect(yaml_variables).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true },
- { key: 'VAR3', value: 'global3', public: true },
- { key: 'VAR4', value: 'global4', public: true }
- )
- expect(job_variables).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
- )
- expect(root_variables_inheritance).to eq(true)
- end
- end
-
- context 'when inheritance is disabled' do
- let(:inherit) { { variables: false } }
-
- it 'does not inherit variables' do
- expect(yaml_variables).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
- )
- expect(job_variables).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
- )
- expect(root_variables_inheritance).to eq(false)
- end
- end
-
- context 'when specific variables are to inherited' do
- let(:inherit) { { variables: %w[VAR1 VAR4] } }
-
- it 'returns all variables and inherits only specified variables' do
- expect(yaml_variables).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true },
- { key: 'VAR4', value: 'global4', public: true }
- )
- expect(job_variables).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
- )
- expect(root_variables_inheritance).to eq(%w[VAR1 VAR4])
- end
- end
- end
-
context 'when job variables are defined' do
let(:config) do
{
@@ -1065,10 +981,6 @@ module Gitlab
end
it 'returns job variables' do
- expect(yaml_variables).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
- )
expect(job_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
@@ -1096,9 +1008,6 @@ module Gitlab
# When variables config is empty, we assume this is a valid
# configuration, see issue #18775
#
- expect(yaml_variables).to be_an_instance_of(Array)
- expect(yaml_variables).to be_empty
-
expect(job_variables).to eq([])
expect(root_variables_inheritance).to eq(true)
end
@@ -1115,9 +1024,6 @@ module Gitlab
end
it 'returns empty array' do
- expect(yaml_variables).to be_an_instance_of(Array)
- expect(yaml_variables).to be_empty
-
expect(job_variables).to eq([])
expect(root_variables_inheritance).to eq(true)
end
@@ -1246,6 +1152,10 @@ module Gitlab
end
it { is_expected.to be_valid }
+
+ it 'adds the job from the included file' do
+ expect(subject.builds.map { |build| build[:name] }).to contain_exactly('job1', 'rspec')
+ end
end
context "when the included internal file is not present" do
@@ -1349,7 +1259,7 @@ module Gitlab
end
it 'sets matrix variables' do
- build_variables = builds.map { |build| build[:yaml_variables] }
+ build_variables = builds.map { |build| build[:job_variables] }
expected_variables = [
[
{ key: 'VAR1', value: '1' },
@@ -1601,7 +1511,6 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -1972,7 +1881,6 @@ module Gitlab
},
when: 'on_success',
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -1988,7 +1896,6 @@ module Gitlab
],
when: 'on_success',
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :dag
@@ -2011,7 +1918,6 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -2028,7 +1934,6 @@ module Gitlab
],
when: "on_success",
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :dag
@@ -2057,7 +1962,6 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -2076,7 +1980,6 @@ module Gitlab
],
when: "on_success",
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :dag
@@ -2101,7 +2004,6 @@ module Gitlab
],
when: "on_success",
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :dag
@@ -2134,7 +2036,6 @@ module Gitlab
],
when: "on_success",
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :dag
@@ -2342,7 +2243,6 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -2391,7 +2291,6 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -2406,7 +2305,6 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: [],
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
@@ -2851,7 +2749,7 @@ module Gitlab
YAML
end
- it_behaves_like 'returns errors', 'The pipeline has circular dependencies.'
+ it_behaves_like 'returns errors', 'The pipeline has circular dependencies'
end
end
@@ -2883,7 +2781,7 @@ module Gitlab
expect(subject.valid?).to eq(false)
expect(subject.errors).to contain_exactly(
'jobs:rspec config contains unknown keys: bad_tags',
- 'jobs:rspec rules should be an array of hashes')
+ 'jobs:rspec rules should be an array containing hashes and arrays of hashes')
end
end
diff --git a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb
index 85bafc77553..5a4e9001ac9 100644
--- a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb
+++ b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do
context 'when database meets minimum supported version' do
before do
- allow(Gitlab::Database).to receive(:postgresql_minimum_supported_version?).and_return(true)
+ allow(Gitlab::Database.main).to receive(:postgresql_minimum_supported_version?).and_return(true)
end
it { is_expected.to be_empty }
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do
context 'when database does not meet minimum supported version' do
before do
- allow(Gitlab::Database).to receive(:postgresql_minimum_supported_version?).and_return(false)
+ allow(Gitlab::Database.main).to receive(:postgresql_minimum_supported_version?).and_return(false)
end
let(:notice_deprecated_database) do
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do
'%{pg_version_minimum} is required for this version of GitLab. ' \
'Please upgrade your environment to a supported PostgreSQL version, ' \
'see %{pg_requirements_url} for details.') % {
- pg_version_current: Gitlab::Database.version,
+ pg_version_current: Gitlab::Database.main.version,
pg_version_minimum: Gitlab::Database::MINIMUM_POSTGRES_VERSION,
pg_requirements_url: '<a href="https://docs.gitlab.com/ee/install/requirements.html#database">database requirements</a>'
}
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index f8a007cdd75..aac4936b20e 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -17,6 +17,18 @@ RSpec.describe Gitlab::Conflict::File do
let(:raw_conflict_file) { Gitlab::Git::Conflict::File.new(repository, our_commit.oid, rugged_conflict, raw_conflict_content) }
let(:conflict_file) { described_class.new(raw_conflict_file, merge_request: merge_request) }
+ describe 'delegates' do
+ it { expect(conflict_file).to delegate_method(:type).to(:raw) }
+ it { expect(conflict_file).to delegate_method(:content).to(:raw) }
+ it { expect(conflict_file).to delegate_method(:path).to(:raw) }
+ it { expect(conflict_file).to delegate_method(:ancestor_path).to(:raw) }
+ it { expect(conflict_file).to delegate_method(:their_path).to(:raw) }
+ it { expect(conflict_file).to delegate_method(:our_path).to(:raw) }
+ it { expect(conflict_file).to delegate_method(:our_mode).to(:raw) }
+ it { expect(conflict_file).to delegate_method(:our_blob).to(:raw) }
+ it { expect(conflict_file).to delegate_method(:repository).to(:raw) }
+ end
+
describe '#resolve_lines' do
let(:section_keys) { conflict_file.sections.map { |section| section[:id] }.compact }
@@ -324,4 +336,27 @@ RSpec.describe Gitlab::Conflict::File do
end
end
end
+
+ describe '#conflict_type' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:rugged_conflict) { { ancestor: { path: ancestor_path }, theirs: { path: their_path }, ours: { path: our_path } } }
+ let(:diff_file) { double(renamed_file?: renamed_file?) }
+
+ subject(:conflict_type) { conflict_file.conflict_type(diff_file) }
+
+ where(:ancestor_path, :their_path, :our_path, :renamed_file?, :result) do
+ '/ancestor/path' | '/their/path' | '/our/path' | false | :both_modified
+ '/ancestor/path' | '' | '/our/path' | false | :modified_source_removed_target
+ '/ancestor/path' | '/their/path' | '' | false | :modified_target_removed_source
+ '' | '/their/path' | '/our/path' | false | :both_added
+ '' | '' | '/our/path' | false | :removed_target_renamed_source
+ '' | '' | '/our/path' | true | :renamed_same_file
+ '' | '/their/path' | '' | false | :removed_source_renamed_target
+ end
+
+ with_them do
+ it { expect(conflict_type).to eq(result) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index 8e63e771caa..239eff11bf3 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -19,14 +19,28 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
}
end
- describe '.default_settings_hash' do
- let(:settings) { described_class.default_settings_hash }
+ describe '.default_enabled' do
+ let(:enabled) { described_class.default_enabled }
- it 'returns defaults for all keys' do
- expect(settings['enabled']).to be_truthy
- expect(settings['report_only']).to be_falsey
+ it 'is enabled' do
+ expect(enabled).to be_truthy
+ end
+
+ context 'when in production' do
+ before do
+ allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
+ end
+
+ it 'is disabled' do
+ expect(enabled).to be_falsey
+ end
+ end
+ end
+
+ describe '.default_directives' do
+ let(:directives) { described_class.default_directives }
- directives = settings['directives']
+ it 'returns default directives' do
directive_names = (described_class::DIRECTIVES - ['report_uri'])
directive_names.each do |directive|
expect(directives.has_key?(directive)).to be_truthy
@@ -38,27 +52,25 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives['child_src']).to eq(directives['frame_src'])
end
- context 'when in production' do
+ context 'when CDN host is defined' do
before do
- allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
+ stub_config_setting(cdn_host: 'https://example.com')
end
- it 'is disabled' do
- expect(settings['enabled']).to be_falsey
+ it 'adds CDN host to CSP' do
+ expect(directives['script_src']).to eq("'strict-dynamic' 'self' 'unsafe-inline' 'unsafe-eval' https://www.google.com/recaptcha/ https://www.recaptcha.net https://apis.google.com https://example.com")
+ expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://example.com")
+ expect(directives['font_src']).to eq("'self' https://example.com")
end
end
- context 'when GITLAB_CDN_HOST is set' do
+ context 'when sentry is configured' do
before do
- stub_env('GITLAB_CDN_HOST', 'https://example.com')
+ stub_sentry_settings
end
- it 'adds GITLAB_CDN_HOST to CSP' do
- directives = settings['directives']
-
- expect(directives['script_src']).to eq("'strict-dynamic' 'self' 'unsafe-inline' 'unsafe-eval' https://www.google.com/recaptcha/ https://www.recaptcha.net https://apis.google.com https://example.com")
- expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://example.com")
- expect(directives['font_src']).to eq("'self' https://example.com")
+ it 'adds sentry path to CSP without user' do
+ expect(directives['connect_src']).to eq("'self' dummy://example.com/43")
end
end
@@ -73,8 +85,6 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'does not add CUSTOMER_PORTAL_URL to CSP' do
- directives = settings['directives']
-
expect(directives['frame_src']).to eq("'self' https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com")
end
end
@@ -85,8 +95,6 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'adds CUSTOMER_PORTAL_URL to CSP' do
- directives = settings['directives']
-
expect(directives['frame_src']).to eq("'self' https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com https://customers.example.com")
end
end
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index d64dfc957ca..75741c52579 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
expect(data[:status]).to eq('failed')
expect(data[:status_changed_at]).to eq(status_changed_at)
+ expect(data[:deployment_id]).to eq(deployment.id)
expect(data[:deployable_id]).to eq(deployable.id)
expect(data[:deployable_url]).to eq(expected_deployable_url)
expect(data[:environment]).to eq("somewhere")
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index c05a044f0de..0e574c7aa84 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::DataBuilder::Pipeline do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
- let(:pipeline) do
+ let_it_be_with_reload(:pipeline) do
create(:ci_pipeline,
project: project,
status: 'success',
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
describe '.build' do
let(:data) { described_class.build(pipeline) }
let(:attributes) { data[:object_attributes] }
- let(:build_data) { data[:builds].first }
+ let(:build_data) { data[:builds].last }
let(:runner_data) { build_data[:runner] }
let(:project_data) { data[:project] }
@@ -51,9 +51,9 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
end
context 'build with runner' do
- let!(:build) { create(:ci_build, pipeline: pipeline, runner: ci_runner) }
- let!(:tag_names) { %w(tag-1 tag-2) }
- let(:ci_runner) { create(:ci_runner, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n)}) }
+ let_it_be(:tag_names) { %w(tag-1 tag-2) }
+ let_it_be(:ci_runner) { create(:ci_runner, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n)}) }
+ let_it_be(:build) { create(:ci_build, pipeline: pipeline, runner: ci_runner) }
it 'has runner attributes', :aggregate_failures do
expect(runner_data[:id]).to eq(ci_runner.id)
@@ -73,18 +73,15 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
end
context 'pipeline with variables' do
- let(:build) { create(:ci_build, pipeline: pipeline) }
- let(:data) { described_class.build(pipeline) }
- let(:attributes) { data[:object_attributes] }
- let!(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline, key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1') }
+ let_it_be(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline, key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1') }
it { expect(attributes[:variables]).to be_a(Array) }
it { expect(attributes[:variables]).to contain_exactly({ key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1' }) }
end
context 'when pipeline is a detached merge request pipeline' do
- let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- let(:pipeline) { merge_request.all_pipelines.first }
+ let_it_be(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+ let_it_be(:pipeline) { merge_request.all_pipelines.first }
it 'returns a source ref' do
expect(attributes[:ref]).to eq(merge_request.source_branch)
@@ -108,21 +105,67 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
end
context 'when pipeline has retried builds' do
- before do
- create(:ci_build, :retried, pipeline: pipeline)
- end
+ let_it_be(:retried_build) { create(:ci_build, :retried, pipeline: pipeline) }
it 'does not contain retried builds in payload' do
- expect(data[:builds].count).to eq(1)
- expect(build_data[:id]).to eq(build.id)
+ builds = data[:builds]
+
+ expect(builds.pluck(:id)).to contain_exactly(build.id)
+ end
+
+ it 'contains retried builds if requested' do
+ builds = data.with_retried_builds[:builds]
+
+ expect(builds.pluck(:id)).to contain_exactly(build.id, retried_build.id)
end
end
context 'build with environment' do
- let!(:build) { create(:ci_build, :teardown_environment, pipeline: pipeline) }
+ let_it_be(:build) { create(:ci_build, :environment_with_deployment_tier, :with_deployment, pipeline: pipeline) }
+
+ let(:build_environment_data) { build_data[:environment] }
+
+ it 'has environment attributes', :aggregate_failures do
+ expect(build_environment_data[:name]).to eq(build.expanded_environment_name)
+ expect(build_environment_data[:action]).to eq(build.environment_action)
+ expect(build_environment_data[:deployment_tier]).to eq(build.persisted_environment.try(:tier))
+ end
+ end
- it { expect(build_data[:environment][:name]).to eq(build.expanded_environment_name) }
- it { expect(build_data[:environment][:action]).to eq(build.environment_action) }
+ context 'avoids N+1 database queries' do
+ it "with multiple builds" do
+ # Preparing the pipeline with the minimal builds
+ pipeline = create(:ci_pipeline, user: user, project: project)
+ create(:ci_build, user: user, project: project, pipeline: pipeline)
+ create(:ci_build, :deploy_to_production, :with_deployment, user: user, project: project, pipeline: pipeline)
+
+ # We need `.to_json` as the build hook data is wrapped within `Gitlab::Lazy`
+ control_count = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).to_json }.count
+
+ # Adding more builds to the pipeline and serializing the data again
+ create_list(:ci_build, 3, user: user, project: project, pipeline: pipeline)
+ create(:ci_build, :start_review_app, :with_deployment, user: user, project: project, pipeline: pipeline)
+ create(:ci_build, :stop_review_app, :with_deployment, user: user, project: project, pipeline: pipeline)
+
+ expect { described_class.build(pipeline.reload).to_json }.not_to exceed_query_limit(control_count)
+ end
+
+ it "with multiple retried builds" do
+ # Preparing the pipeline with the minimal builds
+ pipeline = create(:ci_pipeline, user: user, project: project)
+ create(:ci_build, :retried, user: user, project: project, pipeline: pipeline)
+ create(:ci_build, :deploy_to_production, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
+
+ # We need `.to_json` as the build hook data is wrapped within `Gitlab::Lazy`
+ control_count = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).with_retried_builds.to_json }.count
+
+ # Adding more builds to the pipeline and serializing the data again
+ create_list(:ci_build, 3, :retried, user: user, project: project, pipeline: pipeline)
+ create(:ci_build, :start_review_app, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
+ create(:ci_build, :stop_review_app, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
+
+ expect { described_class.build(pipeline.reload).with_retried_builds.to_json }.not_to exceed_query_limit(control_count)
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
new file mode 100644
index 00000000000..b4010d0fe8d
--- /dev/null
+++ b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
+ describe '#perform' do
+ subject { described_class.new(async_index) }
+
+ let(:async_index) { create(:postgres_async_index) }
+
+ let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
+
+ let(:connection) { ApplicationRecord.connection }
+
+ context 'when the index already exists' do
+ before do
+ connection.execute(async_index.definition)
+ end
+
+ it 'skips index creation' do
+ expect(connection).not_to receive(:execute).with(/CREATE INDEX/)
+
+ subject.perform
+ end
+ end
+
+ it 'creates the index while controlling statement timeout' do
+ allow(connection).to receive(:execute).and_call_original
+ expect(connection).to receive(:execute).with("SET statement_timeout TO '32400s'").ordered.and_call_original
+ expect(connection).to receive(:execute).with(async_index.definition).ordered.and_call_original
+ expect(connection).to receive(:execute).with("RESET statement_timeout").ordered.and_call_original
+
+ subject.perform
+ end
+
+ it 'removes the index preparation record from postgres_async_indexes' do
+ expect(async_index).to receive(:destroy).and_call_original
+
+ expect { subject.perform }.to change { index_model.count }.by(-1)
+ end
+
+ it 'skips logic if not able to acquire exclusive lease' do
+ expect(subject).to receive(:try_obtain_lease).and_return(false)
+ expect(connection).not_to receive(:execute).with(/CREATE INDEX/)
+ expect(async_index).not_to receive(:destroy)
+
+ expect { subject.perform }.not_to change { index_model.count }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
new file mode 100644
index 00000000000..ed15951dfb0
--- /dev/null
+++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
@@ -0,0 +1,176 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
+ let(:migration) { ActiveRecord::Migration.new.extend(described_class) }
+ let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
+ let(:connection) { ApplicationRecord.connection }
+ let(:table_name) { '_test_async_indexes' }
+ let(:index_name) { "index_#{table_name}_on_id" }
+
+ before do
+ allow(migration).to receive(:puts)
+ end
+
+ describe '#unprepare_async_index' do
+ let!(:async_index) { create(:postgres_async_index, name: index_name) }
+
+ context 'when the flag is enabled' do
+ before do
+ stub_feature_flags(database_async_index_creation: true)
+ end
+
+ it 'destroys the record' do
+ expect do
+ migration.unprepare_async_index(table_name, 'id')
+ end.to change { index_model.where(name: index_name).count }.by(-1)
+ end
+
+ context 'when an explicit name is given' do
+ let(:index_name) { 'my_test_async_index' }
+
+ it 'destroys the record' do
+ expect do
+ migration.unprepare_async_index(table_name, 'id', name: index_name)
+ end.to change { index_model.where(name: index_name).count }.by(-1)
+ end
+ end
+
+ context 'when the async index table does not exist' do
+ it 'does not raise an error' do
+ connection.drop_table(:postgres_async_indexes)
+
+ expect(index_model).not_to receive(:find_by)
+
+ expect { migration.unprepare_async_index(table_name, 'id') }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ it 'does not destroy the record' do
+ stub_feature_flags(database_async_index_creation: false)
+
+ expect do
+ migration.unprepare_async_index(table_name, 'id')
+ end.not_to change { index_model.where(name: index_name).count }
+ end
+ end
+ end
+
+ describe '#unprepare_async_index_by_name' do
+ let(:index_name) { "index_#{table_name}_on_id" }
+ let!(:async_index) { create(:postgres_async_index, name: index_name) }
+
+ context 'when the flag is enabled' do
+ before do
+ stub_feature_flags(database_async_index_creation: true)
+ end
+
+ it 'destroys the record' do
+ expect do
+ migration.unprepare_async_index_by_name(table_name, index_name)
+ end.to change { index_model.where(name: index_name).count }.by(-1)
+ end
+
+ context 'when the async index table does not exist' do
+ it 'does not raise an error' do
+ connection.drop_table(:postgres_async_indexes)
+
+ expect(index_model).not_to receive(:find_by)
+
+ expect { migration.unprepare_async_index_by_name(table_name, index_name) }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ it 'does not destroy the record' do
+ stub_feature_flags(database_async_index_creation: false)
+
+ expect do
+ migration.unprepare_async_index_by_name(table_name, index_name)
+ end.not_to change { index_model.where(name: index_name).count }
+ end
+ end
+ end
+
+ describe '#prepare_async_index' do
+ before do
+ connection.create_table(table_name)
+ end
+
+ context 'when the feature flag is enabled' do
+ before do
+ stub_feature_flags(database_async_index_creation: true)
+ end
+
+ it 'creates the record for the async index' do
+ expect do
+ migration.prepare_async_index(table_name, 'id')
+ end.to change { index_model.where(name: index_name).count }.by(1)
+
+ record = index_model.find_by(name: index_name)
+
+ expect(record.table_name).to eq(table_name)
+ expect(record.definition).to match(/CREATE INDEX CONCURRENTLY "#{index_name}"/)
+ end
+
+ context 'when an explicit name is given' do
+ let(:index_name) { 'my_async_index_name' }
+
+ it 'creates the record with the given name' do
+ expect do
+ migration.prepare_async_index(table_name, 'id', name: index_name)
+ end.to change { index_model.where(name: index_name).count }.by(1)
+
+ record = index_model.find_by(name: index_name)
+
+ expect(record.table_name).to eq(table_name)
+ expect(record.definition).to match(/CREATE INDEX CONCURRENTLY "#{index_name}"/)
+ end
+ end
+
+ context 'when the index already exists' do
+ it 'does not create the record' do
+ connection.add_index(table_name, 'id', name: index_name)
+
+ expect do
+ migration.prepare_async_index(table_name, 'id')
+ end.not_to change { index_model.where(name: index_name).count }
+ end
+ end
+
+ context 'when the record already exists' do
+ it 'does not attempt to create the record again' do
+ create(:postgres_async_index, table_name: table_name, name: index_name)
+
+ expect do
+ migration.prepare_async_index(table_name, 'id')
+ end.not_to change { index_model.where(name: index_name).count }
+ end
+ end
+
+ context 'when the async index table does not exist' do
+ it 'does not raise an error' do
+ connection.drop_table(:postgres_async_indexes)
+
+ expect(index_model).not_to receive(:safe_find_or_create_by!)
+
+ expect { migration.prepare_async_index(table_name, 'id') }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ it 'does not create the record' do
+ stub_feature_flags(database_async_index_creation: false)
+
+ expect do
+ migration.prepare_async_index(table_name, 'id')
+ end.not_to change { index_model.where(name: index_name).count }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
new file mode 100644
index 00000000000..434cba4edde
--- /dev/null
+++ b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model do
+ describe 'validations' do
+ let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH }
+ let(:definition_limit) { described_class::MAX_DEFINITION_LENGTH }
+
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(identifier_limit) }
+ it { is_expected.to validate_presence_of(:table_name) }
+ it { is_expected.to validate_length_of(:table_name).is_at_most(identifier_limit) }
+ it { is_expected.to validate_presence_of(:definition) }
+ it { is_expected.to validate_length_of(:definition).is_at_most(definition_limit) }
+ end
+end
diff --git a/spec/lib/gitlab/database/async_indexes_spec.rb b/spec/lib/gitlab/database/async_indexes_spec.rb
new file mode 100644
index 00000000000..74e30ea2c4e
--- /dev/null
+++ b/spec/lib/gitlab/database/async_indexes_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::AsyncIndexes do
+ describe '.create_pending_indexes!' do
+ subject { described_class.create_pending_indexes! }
+
+ before do
+ create_list(:postgres_async_index, 4)
+ end
+
+ it 'takes 2 pending indexes and creates them' do
+ Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.order(:id).limit(2).each do |index|
+ creator = double('index creator')
+ expect(Gitlab::Database::AsyncIndexes::IndexCreator).to receive(:new).with(index).and_return(creator)
+ expect(creator).to receive(:perform)
+ end
+
+ subject
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/connection_spec.rb b/spec/lib/gitlab/database/connection_spec.rb
new file mode 100644
index 00000000000..5e0e6039afc
--- /dev/null
+++ b/spec/lib/gitlab/database/connection_spec.rb
@@ -0,0 +1,467 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Connection do
+ let(:connection) { described_class.new }
+
+ describe '#default_pool_size' do
+ before do
+ allow(Gitlab::Runtime).to receive(:max_threads).and_return(7)
+ end
+
+ it 'returns the max thread size plus a fixed headroom of 10' do
+ expect(connection.default_pool_size).to eq(17)
+ end
+
+ it 'returns the max thread size plus a DB_POOL_HEADROOM if this env var is present' do
+ stub_env('DB_POOL_HEADROOM', '7')
+
+ expect(connection.default_pool_size).to eq(14)
+ end
+ end
+
+ describe '#config' do
+ it 'returns a HashWithIndifferentAccess' do
+ expect(connection.config).to be_an_instance_of(HashWithIndifferentAccess)
+ end
+
+ it 'returns a default pool size' do
+ expect(connection.config).to include(pool: connection.default_pool_size)
+ end
+
+ it 'does not cache its results' do
+ a = connection.config
+ b = connection.config
+
+ expect(a).not_to equal(b)
+ end
+ end
+
+ describe '#pool_size' do
+ context 'when no explicit size is configured' do
+ it 'returns the default pool size' do
+ expect(connection).to receive(:config).and_return({ pool: nil })
+
+ expect(connection.pool_size).to eq(connection.default_pool_size)
+ end
+ end
+
+ context 'when an explicit pool size is set' do
+ it 'returns the pool size' do
+ expect(connection).to receive(:config).and_return({ pool: 4 })
+
+ expect(connection.pool_size).to eq(4)
+ end
+ end
+ end
+
+ describe '#username' do
+ context 'when a username is set' do
+ it 'returns the username' do
+ allow(connection).to receive(:config).and_return(username: 'bob')
+
+ expect(connection.username).to eq('bob')
+ end
+ end
+
+ context 'when a username is not set' do
+ it 'returns the value of the USER environment variable' do
+ allow(connection).to receive(:config).and_return(username: nil)
+ allow(ENV).to receive(:[]).with('USER').and_return('bob')
+
+ expect(connection.username).to eq('bob')
+ end
+ end
+ end
+
+ describe '#database_name' do
+ it 'returns the name of the database' do
+ allow(connection).to receive(:config).and_return(database: 'test')
+
+ expect(connection.database_name).to eq('test')
+ end
+ end
+
+ describe '#adapter_name' do
+ it 'returns the database adapter name' do
+ allow(connection).to receive(:config).and_return(adapter: 'test')
+
+ expect(connection.adapter_name).to eq('test')
+ end
+ end
+
+ describe '#human_adapter_name' do
+ context 'when the adapter is PostgreSQL' do
+ it 'returns PostgreSQL' do
+ allow(connection).to receive(:config).and_return(adapter: 'postgresql')
+
+ expect(connection.human_adapter_name).to eq('PostgreSQL')
+ end
+ end
+
+ context 'when the adapter is not PostgreSQL' do
+ it 'returns Unknown' do
+ allow(connection).to receive(:config).and_return(adapter: 'kittens')
+
+ expect(connection.human_adapter_name).to eq('Unknown')
+ end
+ end
+ end
+
+ describe '#postgresql?' do
+ context 'when using PostgreSQL' do
+ it 'returns true' do
+ allow(connection).to receive(:adapter_name).and_return('PostgreSQL')
+
+ expect(connection.postgresql?).to eq(true)
+ end
+ end
+
+ context 'when not using PostgreSQL' do
+ it 'returns false' do
+ allow(connection).to receive(:adapter_name).and_return('MySQL')
+
+ expect(connection.postgresql?).to eq(false)
+ end
+ end
+ end
+
+ describe '#db_config_with_default_pool_size' do
+ it 'returns db_config with our default pool size' do
+ allow(connection).to receive(:default_pool_size).and_return(9)
+
+ expect(connection.db_config_with_default_pool_size.pool).to eq(9)
+ end
+
+ it 'returns db_config with the correct database name' do
+ db_name = connection.scope.connection.pool.db_config.name
+
+ expect(connection.db_config_with_default_pool_size.name).to eq(db_name)
+ end
+ end
+
+ describe '#disable_prepared_statements' do
+ around do |example|
+ original_config = ::Gitlab::Database.main.config
+
+ example.run
+
+ connection.scope.establish_connection(original_config)
+ end
+
+ it 'disables prepared statements' do
+ connection.scope.establish_connection(
+ ::Gitlab::Database.main.config.merge(prepared_statements: true)
+ )
+
+ expect(connection.scope.connection.prepared_statements).to eq(true)
+
+ connection.disable_prepared_statements
+
+ expect(connection.scope.connection.prepared_statements).to eq(false)
+ end
+
+ context 'with dynamic connection pool size' do
+ before do
+ connection.scope.establish_connection(connection.config.merge(pool: 7))
+ end
+
+ it 'retains the set pool size' do
+ connection.disable_prepared_statements
+
+ expect(connection.scope.connection.prepared_statements).to eq(false)
+ expect(connection.scope.connection.pool.size).to eq(7)
+ end
+ end
+ end
+
+ describe '#db_read_only?' do
+ it 'detects a read-only database' do
+ allow(connection.scope.connection)
+ .to receive(:execute)
+ .with('SELECT pg_is_in_recovery()')
+ .and_return([{ "pg_is_in_recovery" => "t" }])
+
+ expect(connection.db_read_only?).to be_truthy
+ end
+
+ it 'detects a read-only database when pg_is_in_recovery returns a boolean' do
+ allow(connection.scope.connection)
+ .to receive(:execute)
+ .with('SELECT pg_is_in_recovery()')
+ .and_return([{ "pg_is_in_recovery" => true }])
+
+ expect(connection.db_read_only?).to be_truthy
+ end
+
+ it 'detects a read-write database' do
+ allow(connection.scope.connection)
+ .to receive(:execute)
+ .with('SELECT pg_is_in_recovery()')
+ .and_return([{ "pg_is_in_recovery" => "f" }])
+
+ expect(connection.db_read_only?).to be_falsey
+ end
+
+ it 'detects a read-write database when pg_is_in_recovery returns a boolean' do
+ allow(connection.scope.connection)
+ .to receive(:execute)
+ .with('SELECT pg_is_in_recovery()')
+ .and_return([{ "pg_is_in_recovery" => false }])
+
+ expect(connection.db_read_only?).to be_falsey
+ end
+ end
+
+ describe '#db_read_write?' do
+ it 'detects a read-only database' do
+ allow(connection.scope.connection)
+ .to receive(:execute)
+ .with('SELECT pg_is_in_recovery()')
+ .and_return([{ "pg_is_in_recovery" => "t" }])
+
+ expect(connection.db_read_write?).to eq(false)
+ end
+
+ it 'detects a read-only database when pg_is_in_recovery returns a boolean' do
+ allow(connection.scope.connection)
+ .to receive(:execute)
+ .with('SELECT pg_is_in_recovery()')
+ .and_return([{ "pg_is_in_recovery" => true }])
+
+ expect(connection.db_read_write?).to eq(false)
+ end
+
+ it 'detects a read-write database' do
+ allow(connection.scope.connection)
+ .to receive(:execute)
+ .with('SELECT pg_is_in_recovery()')
+ .and_return([{ "pg_is_in_recovery" => "f" }])
+
+ expect(connection.db_read_write?).to eq(true)
+ end
+
+ it 'detects a read-write database when pg_is_in_recovery returns a boolean' do
+ allow(connection.scope.connection)
+ .to receive(:execute)
+ .with('SELECT pg_is_in_recovery()')
+ .and_return([{ "pg_is_in_recovery" => false }])
+
+ expect(connection.db_read_write?).to eq(true)
+ end
+ end
+
+ describe '#version' do
+ around do |example|
+ connection.instance_variable_set(:@version, nil)
+ example.run
+ connection.instance_variable_set(:@version, nil)
+ end
+
+ context "on postgresql" do
+ it "extracts the version number" do
+ allow(connection)
+ .to receive(:database_version)
+ .and_return("PostgreSQL 9.4.4 on x86_64-apple-darwin14.3.0")
+
+ expect(connection.version).to eq '9.4.4'
+ end
+ end
+
+ it 'memoizes the result' do
+ count = ActiveRecord::QueryRecorder
+ .new { 2.times { connection.version } }
+ .count
+
+ expect(count).to eq(1)
+ end
+ end
+
+ describe '#postgresql_minimum_supported_version?' do
+ it 'returns false when using PostgreSQL 10' do
+ allow(connection).to receive(:version).and_return('10')
+
+ expect(connection.postgresql_minimum_supported_version?).to eq(false)
+ end
+
+ it 'returns false when using PostgreSQL 11' do
+ allow(connection).to receive(:version).and_return('11')
+
+ expect(connection.postgresql_minimum_supported_version?).to eq(false)
+ end
+
+ it 'returns true when using PostgreSQL 12' do
+ allow(connection).to receive(:version).and_return('12')
+
+ expect(connection.postgresql_minimum_supported_version?).to eq(true)
+ end
+ end
+
+ describe '#bulk_insert' do
+ before do
+ allow(connection).to receive(:connection).and_return(dummy_connection)
+ allow(dummy_connection).to receive(:quote_column_name, &:itself)
+ allow(dummy_connection).to receive(:quote, &:itself)
+ allow(dummy_connection).to receive(:execute)
+ end
+
+ let(:dummy_connection) { double(:connection) }
+
+ let(:rows) do
+ [
+ { a: 1, b: 2, c: 3 },
+ { c: 6, a: 4, b: 5 }
+ ]
+ end
+
+ it 'does nothing with empty rows' do
+ expect(dummy_connection).not_to receive(:execute)
+
+ connection.bulk_insert('test', [])
+ end
+
+ it 'uses the ordering from the first row' do
+ expect(dummy_connection).to receive(:execute) do |sql|
+ expect(sql).to include('(1, 2, 3)')
+ expect(sql).to include('(4, 5, 6)')
+ end
+
+ connection.bulk_insert('test', rows)
+ end
+
+ it 'quotes column names' do
+ expect(dummy_connection).to receive(:quote_column_name).with(:a)
+ expect(dummy_connection).to receive(:quote_column_name).with(:b)
+ expect(dummy_connection).to receive(:quote_column_name).with(:c)
+
+ connection.bulk_insert('test', rows)
+ end
+
+ it 'quotes values' do
+ 1.upto(6) do |i|
+ expect(dummy_connection).to receive(:quote).with(i)
+ end
+
+ connection.bulk_insert('test', rows)
+ end
+
+ it 'does not quote values of a column in the disable_quote option' do
+ [1, 2, 4, 5].each do |i|
+ expect(dummy_connection).to receive(:quote).with(i)
+ end
+
+ connection.bulk_insert('test', rows, disable_quote: :c)
+ end
+
+ it 'does not quote values of columns in the disable_quote option' do
+ [2, 5].each do |i|
+ expect(dummy_connection).to receive(:quote).with(i)
+ end
+
+ connection.bulk_insert('test', rows, disable_quote: [:a, :c])
+ end
+
+ it 'handles non-UTF-8 data' do
+ expect { connection.bulk_insert('test', [{ a: "\255" }]) }.not_to raise_error
+ end
+
+ context 'when using PostgreSQL' do
+ it 'allows the returning of the IDs of the inserted rows' do
+ result = double(:result, values: [['10']])
+
+ expect(dummy_connection)
+ .to receive(:execute)
+ .with(/RETURNING id/)
+ .and_return(result)
+
+ ids = connection
+ .bulk_insert('test', [{ number: 10 }], return_ids: true)
+
+ expect(ids).to eq([10])
+ end
+
+ it 'allows setting the upsert to do nothing' do
+ expect(dummy_connection)
+ .to receive(:execute)
+ .with(/ON CONFLICT DO NOTHING/)
+
+ connection
+ .bulk_insert('test', [{ number: 10 }], on_conflict: :do_nothing)
+ end
+ end
+ end
+
+ describe '#cached_column_exists?' do
+ it 'only retrieves data once' do
+ expect(connection.scope.connection)
+ .to receive(:columns)
+ .once.and_call_original
+
+ 2.times do
+ expect(connection.cached_column_exists?(:projects, :id)).to be_truthy
+ expect(connection.cached_column_exists?(:projects, :bogus_column)).to be_falsey
+ end
+ end
+ end
+
+ describe '#cached_table_exists?' do
+ it 'only retrieves data once per table' do
+ expect(connection.scope.connection)
+ .to receive(:data_source_exists?)
+ .with(:projects)
+ .once.and_call_original
+
+ expect(connection.scope.connection)
+ .to receive(:data_source_exists?)
+ .with(:bogus_table_name)
+ .once.and_call_original
+
+ 2.times do
+ expect(connection.cached_table_exists?(:projects)).to be_truthy
+ expect(connection.cached_table_exists?(:bogus_table_name)).to be_falsey
+ end
+ end
+
+ it 'returns false when database does not exist' do
+ expect(connection.scope).to receive(:connection) do
+ raise ActiveRecord::NoDatabaseError, 'broken'
+ end
+
+ expect(connection.cached_table_exists?(:projects)).to be(false)
+ end
+ end
+
+ describe '#exists?' do
+ it 'returns true if `ActiveRecord::Base.connection` succeeds' do
+ expect(connection.scope).to receive(:connection)
+
+ expect(connection.exists?).to be(true)
+ end
+
+ it 'returns false if `ActiveRecord::Base.connection` fails' do
+ expect(connection.scope).to receive(:connection) do
+ raise ActiveRecord::NoDatabaseError, 'broken'
+ end
+
+ expect(connection.exists?).to be(false)
+ end
+ end
+
+ describe '#system_id' do
+ it 'returns the PostgreSQL system identifier' do
+ expect(connection.system_id).to be_an_instance_of(Integer)
+ end
+ end
+
+ describe '#get_write_location' do
+ it 'returns a string' do
+ expect(connection.get_write_location(connection.scope.connection))
+ .to be_a(String)
+ end
+
+ it 'returns nil if there are no results' do
+ expect(connection.get_write_location(double(select_all: []))).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
index 015dd2ba8d2..0ca99ec9acf 100644
--- a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
describe '#select' do
it 'performs a read' do
- expect(proxy).to receive(:read_using_load_balancer).with(:select, ['foo'])
+ expect(proxy).to receive(:read_using_load_balancer).with(:select, 'foo')
proxy.select('foo')
end
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
arel = double(:arel)
expect(proxy).to receive(:read_using_load_balancer)
- .with(:select_all, [arel, 'foo', []])
+ .with(:select_all, arel, 'foo', [])
proxy.select_all(arel, 'foo')
end
@@ -37,7 +37,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
arel = double(:arel, locked: true)
expect(proxy).to receive(:write_using_load_balancer)
- .with(:select_all, [arel, 'foo', []], sticky: true)
+ .with(:select_all, arel, 'foo', [], sticky: true)
proxy.select_all(arel, 'foo')
end
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
describe "#{name}" do
it 'runs the query on the replica' do
expect(proxy).to receive(:read_using_load_balancer)
- .with(name, ['foo'])
+ .with(name, 'foo')
proxy.send(name, 'foo')
end
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
describe "#{name}" do
it 'runs the query on the primary and sticks to it' do
expect(proxy).to receive(:write_using_load_balancer)
- .with(name, ['foo'], sticky: true)
+ .with(name, 'foo', sticky: true)
proxy.send(name, 'foo')
end
@@ -187,7 +187,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
describe '#method_missing' do
it 'runs the query on the primary without sticking to it' do
expect(proxy).to receive(:write_using_load_balancer)
- .with(:foo, ['foo'])
+ .with(:foo, 'foo')
proxy.foo('foo')
end
@@ -197,7 +197,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
expect(proxy).to receive(:write_using_load_balancer).and_call_original
- expect { proxy.case_sensitive_comparison(:table, :attribute, :column, { value: :value, format: :format }) }
+ expect { proxy.case_sensitive_comparison(:table, :attribute, :column, value: :value, format: :format) }
.not_to raise_error
end
@@ -212,7 +212,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
end
it 'runs the query on the replica' do
- expect(proxy).to receive(:read_using_load_balancer).with(:foo, ['foo'])
+ expect(proxy).to receive(:read_using_load_balancer).with(:foo, 'foo')
proxy.foo('foo')
end
@@ -222,7 +222,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
expect(proxy).to receive(:read_using_load_balancer).and_call_original
- expect { proxy.case_sensitive_comparison(:table, :attribute, :column, { value: :value, format: :format }) }
+ expect { proxy.case_sensitive_comparison(:table, :attribute, :column, value: :value, format: :format) }
.not_to raise_error
end
end
@@ -245,7 +245,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
expect(connection).to receive(:foo).with('foo')
expect(proxy.load_balancer).to receive(:read).and_yield(connection)
- proxy.read_using_load_balancer(:foo, ['foo'])
+ proxy.read_using_load_balancer(:foo, 'foo')
end
end
@@ -257,7 +257,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
expect(connection).to receive(:foo).with('foo')
expect(proxy.load_balancer).to receive(:read).and_yield(connection)
- proxy.read_using_load_balancer(:foo, ['foo'])
+ proxy.read_using_load_balancer(:foo, 'foo')
end
end
@@ -269,7 +269,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
expect(connection).to receive(:foo).with('foo')
expect(proxy.load_balancer).to receive(:read).and_yield(connection)
- proxy.read_using_load_balancer(:foo, ['foo'])
+ proxy.read_using_load_balancer(:foo, 'foo')
end
end
@@ -283,7 +283,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
expect(proxy.load_balancer).to receive(:read_write)
.and_yield(connection)
- proxy.read_using_load_balancer(:foo, ['foo'])
+ proxy.read_using_load_balancer(:foo, 'foo')
end
end
end
@@ -302,7 +302,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
expect(connection).to receive(:foo).with('foo')
expect(session).not_to receive(:write!)
- proxy.write_using_load_balancer(:foo, ['foo'])
+ proxy.write_using_load_balancer(:foo, 'foo')
end
it 'sticks to the primary when sticking is enabled' do
@@ -310,7 +310,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
expect(connection).to receive(:foo).with('foo')
expect(session).to receive(:write!)
- proxy.write_using_load_balancer(:foo, ['foo'], sticky: true)
+ proxy.write_using_load_balancer(:foo, 'foo', sticky: true)
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/host_list_spec.rb b/spec/lib/gitlab/database/load_balancing/host_list_spec.rb
index 873b599f84d..ad4ca18d5e6 100644
--- a/spec/lib/gitlab/database/load_balancing/host_list_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/host_list_spec.rb
@@ -3,25 +3,17 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::HostList do
- def expect_metrics(hosts)
- expect(Gitlab::Metrics.registry.get(:db_load_balancing_hosts).get({})).to eq(hosts)
- end
-
- before do
- allow(Gitlab::Database)
- .to receive(:create_connection_pool)
- .and_return(ActiveRecord::Base.connection_pool)
- end
-
+ let(:db_host) { ActiveRecord::Base.connection_pool.db_config.host }
let(:load_balancer) { double(:load_balancer) }
let(:host_count) { 2 }
+ let(:hosts) { Array.new(host_count) { Gitlab::Database::LoadBalancing::Host.new(db_host, load_balancer, port: 5432) } }
+ let(:host_list) { described_class.new(hosts) }
- let(:host_list) do
- hosts = Array.new(host_count) do
- Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer, port: 5432)
+ before do
+ # each call generate a new replica pool
+ allow(load_balancer).to receive(:create_replica_connection_pool) do
+ double(:replica_connection_pool)
end
-
- described_class.new(hosts)
end
describe '#initialize' do
@@ -42,8 +34,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::HostList do
context 'with ports' do
it 'returns the host names of all hosts' do
hosts = [
- ['localhost', 5432],
- ['localhost', 5432]
+ [db_host, 5432],
+ [db_host, 5432]
]
expect(host_list.host_names_and_ports).to eq(hosts)
@@ -51,18 +43,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::HostList do
end
context 'without ports' do
- let(:host_list) do
- hosts = Array.new(2) do
- Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer)
- end
-
- described_class.new(hosts)
- end
+ let(:hosts) { Array.new(2) { Gitlab::Database::LoadBalancing::Host.new(db_host, load_balancer) } }
it 'returns the host names of all hosts' do
hosts = [
- ['localhost', nil],
- ['localhost', nil]
+ [db_host, nil],
+ [db_host, nil]
]
expect(host_list.host_names_and_ports).to eq(hosts)
@@ -70,48 +56,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::HostList do
end
end
- describe '#manage_pool?' do
- before do
- allow(Gitlab::Database).to receive(:create_connection_pool) { double(:connection) }
- end
-
- context 'when the testing pool belongs to one host of the host list' do
- it 'returns true' do
- pool = host_list.hosts.first.pool
-
- expect(host_list.manage_pool?(pool)).to be(true)
- end
- end
-
- context 'when the testing pool belongs to a former host of the host list' do
- it 'returns false' do
- pool = host_list.hosts.first.pool
- host_list.hosts = [
- Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer)
- ]
-
- expect(host_list.manage_pool?(pool)).to be(false)
- end
- end
-
- context 'when the testing pool belongs to a new host of the host list' do
- it 'returns true' do
- host = Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer)
- host_list.hosts = [host]
-
- expect(host_list.manage_pool?(host.pool)).to be(true)
- end
- end
-
- context 'when the testing pool does not have any relation with the host list' do
- it 'returns false' do
- host = Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer)
-
- expect(host_list.manage_pool?(host.pool)).to be(false)
- end
- end
- end
-
describe '#hosts' do
it 'returns a copy of the host' do
first = host_list.hosts
@@ -185,4 +129,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::HostList do
end
end
end
+
+ def expect_metrics(hosts)
+ expect(Gitlab::Metrics.registry.get(:db_load_balancing_hosts).get({})).to eq(hosts)
+ end
end
diff --git a/spec/lib/gitlab/database/load_balancing/host_spec.rb b/spec/lib/gitlab/database/load_balancing/host_spec.rb
index 4dfddef68c8..f42ac8be1bb 100644
--- a/spec/lib/gitlab/database/load_balancing/host_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/host_spec.rb
@@ -3,15 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::Host do
- let(:load_balancer) do
- Gitlab::Database::LoadBalancing::LoadBalancer.new(%w[localhost])
- end
+ let(:load_balancer) { Gitlab::Database::LoadBalancing::LoadBalancer.new }
- let(:host) { load_balancer.host_list.hosts.first }
+ let(:host) do
+ Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer)
+ end
before do
- allow(Gitlab::Database).to receive(:create_connection_pool)
- .and_return(ActiveRecord::Base.connection_pool)
+ allow(load_balancer).to receive(:create_replica_connection_pool) do
+ ActiveRecord::Base.connection_pool
+ end
end
def raise_and_wrap(wrapper, original)
@@ -63,7 +64,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
expect(host.pool)
.to receive(:disconnect!)
- host.disconnect!(1)
+ host.disconnect!(timeout: 1)
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index b82b8d9a311..c647f5a8f5d 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -3,20 +3,22 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
- let(:pool) { Gitlab::Database.create_connection_pool(2) }
let(:conflict_error) { Class.new(RuntimeError) }
-
- let(:lb) { described_class.new(%w(localhost localhost)) }
+ let(:db_host) { ActiveRecord::Base.connection_pool.db_config.host }
+ let(:lb) { described_class.new([db_host, db_host]) }
+ let(:request_cache) { lb.send(:request_cache) }
before do
- allow(Gitlab::Database).to receive(:create_connection_pool)
- .and_return(pool)
stub_const(
'Gitlab::Database::LoadBalancing::LoadBalancer::PG::TRSerializationFailure',
conflict_error
)
end
+ after do |example|
+ lb.disconnect!(timeout: 0) unless example.metadata[:skip_disconnect]
+ end
+
def raise_and_wrap(wrapper, original)
raise original
rescue original.class
@@ -123,8 +125,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
describe '#read_write' do
it 'yields a connection for a write' do
- expect { |b| lb.read_write(&b) }
- .to yield_with_args(ActiveRecord::Base.retrieve_connection)
+ connection = ActiveRecord::Base.connection_pool.connection
+
+ expect { |b| lb.read_write(&b) }.to yield_with_args(connection)
end
it 'uses a retry with exponential backoffs' do
@@ -134,140 +137,30 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
end
- describe '#db_role_for_connection' do
- context 'when the load balancer creates the connection with #read' do
- it 'returns :replica' do
- role = nil
- lb.read do |connection|
- role = lb.db_role_for_connection(connection)
- end
-
- expect(role).to be(:replica)
- end
- end
-
- context 'when the load balancer uses nested #read' do
- it 'returns :replica' do
- roles = []
- lb.read do |connection_1|
- lb.read do |connection_2|
- roles << lb.db_role_for_connection(connection_2)
- end
- roles << lb.db_role_for_connection(connection_1)
- end
-
- expect(roles).to eq([:replica, :replica])
- end
- end
-
- context 'when the load balancer creates the connection with #read_write' do
- it 'returns :primary' do
- role = nil
- lb.read_write do |connection|
- role = lb.db_role_for_connection(connection)
- end
-
- expect(role).to be(:primary)
- end
- end
-
- context 'when the load balancer uses nested #read_write' do
- it 'returns :primary' do
- roles = []
- lb.read_write do |connection_1|
- lb.read_write do |connection_2|
- roles << lb.db_role_for_connection(connection_2)
- end
- roles << lb.db_role_for_connection(connection_1)
- end
-
- expect(roles).to eq([:primary, :primary])
- end
- end
-
- context 'when the load balancer falls back the connection creation to primary' do
- it 'returns :primary' do
- allow(lb).to receive(:serialization_failure?).and_return(true)
-
- role = nil
- raised = 7 # 2 hosts = 6 retries
-
- lb.read do |connection|
- if raised > 0
- raised -= 1
- raise
- end
-
- role = lb.db_role_for_connection(connection)
- end
-
- expect(role).to be(:primary)
- end
- end
-
- context 'when the load balancer uses replica after recovery from a failure' do
- it 'returns :replica' do
- allow(lb).to receive(:connection_error?).and_return(true)
-
- role = nil
- raised = false
-
- lb.read do |connection|
- unless raised
- raised = true
- raise
- end
-
- role = lb.db_role_for_connection(connection)
- end
-
- expect(role).to be(:replica)
- end
- end
-
- context 'when the connection comes from a pool managed by the host list' do
- it 'returns :replica' do
- connection = double(:connection)
- allow(connection).to receive(:pool).and_return(lb.host_list.hosts.first.pool)
-
- expect(lb.db_role_for_connection(connection)).to be(:replica)
- end
- end
-
- context 'when the connection comes from the primary pool' do
- it 'returns :primary' do
- connection = double(:connection)
- allow(connection).to receive(:pool).and_return(ActiveRecord::Base.connection_pool)
-
- expect(lb.db_role_for_connection(connection)).to be(:primary)
- end
- end
-
- context 'when the connection does not come from any known pool' do
- it 'returns nil' do
- connection = double(:connection)
- pool = double(:connection_pool)
- allow(connection).to receive(:pool).and_return(pool)
-
- expect(lb.db_role_for_connection(connection)).to be(nil)
- end
- end
- end
-
describe '#host' do
it 'returns the secondary host to use' do
expect(lb.host).to be_an_instance_of(Gitlab::Database::LoadBalancing::Host)
end
it 'stores the host in a thread-local variable' do
- RequestStore.delete(described_class::CACHE_KEY)
- RequestStore.delete(described_class::VALID_HOSTS_CACHE_KEY)
+ request_cache.delete(described_class::CACHE_KEY)
expect(lb.host_list).to receive(:next).once.and_call_original
lb.host
lb.host
end
+
+ it 'does not create conflicts with other load balancers when caching hosts' do
+ lb1 = described_class.new([db_host, db_host], ActiveRecord::Base)
+ lb2 = described_class.new([db_host, db_host], Ci::CiDatabaseRecord)
+
+ host1 = lb1.host
+ host2 = lb2.host
+
+ expect(lb1.send(:request_cache)[described_class::CACHE_KEY]).to eq(host1)
+ expect(lb2.send(:request_cache)[described_class::CACHE_KEY]).to eq(host2)
+ end
end
describe '#release_host' do
@@ -278,8 +171,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
lb.release_host
- expect(RequestStore[described_class::CACHE_KEY]).to be_nil
- expect(RequestStore[described_class::VALID_HOSTS_CACHE_KEY]).to be_nil
+ expect(request_cache[described_class::CACHE_KEY]).to be_nil
end
end
@@ -414,89 +306,76 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
end
- describe '#select_caught_up_hosts' do
+ describe '#select_up_to_date_host' do
let(:location) { 'AB/12345'}
let(:hosts) { lb.host_list.hosts }
- let(:valid_host_list) { RequestStore[described_class::VALID_HOSTS_CACHE_KEY] }
- let(:valid_hosts) { valid_host_list.hosts }
+ let(:set_host) { request_cache[described_class::CACHE_KEY] }
- subject { lb.select_caught_up_hosts(location) }
-
- context 'when all replicas are caught up' do
- before do
- expect(hosts).to all(receive(:caught_up?).with(location).and_return(true))
- end
-
- it 'returns true and sets all hosts to valid' do
- expect(subject).to be true
- expect(valid_host_list).to be_a(Gitlab::Database::LoadBalancing::HostList)
- expect(valid_hosts).to contain_exactly(*hosts)
- end
- end
+ subject { lb.select_up_to_date_host(location) }
context 'when none of the replicas are caught up' do
before do
expect(hosts).to all(receive(:caught_up?).with(location).and_return(false))
end
- it 'returns false and does not set the valid hosts' do
+ it 'returns false and does not update the host thread-local variable' do
expect(subject).to be false
- expect(valid_host_list).to be_nil
+ expect(set_host).to be_nil
end
end
- context 'when one of the replicas is caught up' do
+ context 'when any of the replicas is caught up' do
before do
- expect(hosts[0]).to receive(:caught_up?).with(location).and_return(false)
+ # `allow` for non-caught up host, because we may not even check it, if will find the caught up one earlier
+ allow(hosts[0]).to receive(:caught_up?).with(location).and_return(false)
expect(hosts[1]).to receive(:caught_up?).with(location).and_return(true)
end
- it 'returns true and sets one host to valid' do
+ it 'returns true and sets host thread-local variable' do
expect(subject).to be true
- expect(valid_host_list).to be_a(Gitlab::Database::LoadBalancing::HostList)
- expect(valid_hosts).to contain_exactly(hosts[1])
- end
-
- it 'host always returns the caught-up replica' do
- subject
-
- 3.times do
- expect(lb.host).to eq(hosts[1])
- RequestStore.delete(described_class::CACHE_KEY)
- end
+ expect(set_host).to eq(hosts[1])
end
end
end
- describe '#select_up_to_date_host' do
- let(:location) { 'AB/12345'}
- let(:hosts) { lb.host_list.hosts }
- let(:set_host) { RequestStore[described_class::CACHE_KEY] }
+ describe '#create_replica_connection_pool' do
+ it 'creates a new connection pool with specific pool size and name' do
+ with_replica_pool(5, 'other_host') do |replica_pool|
+ expect(replica_pool)
+ .to be_kind_of(ActiveRecord::ConnectionAdapters::ConnectionPool)
- subject { lb.select_up_to_date_host(location) }
-
- context 'when none of the replicas are caught up' do
- before do
- expect(hosts).to all(receive(:caught_up?).with(location).and_return(false))
+ expect(replica_pool.db_config.host).to eq('other_host')
+ expect(replica_pool.db_config.pool).to eq(5)
+ expect(replica_pool.db_config.name).to end_with("_replica")
end
+ end
- it 'returns false and does not update the host thread-local variable' do
- expect(subject).to be false
- expect(set_host).to be_nil
+ it 'allows setting of a custom hostname and port' do
+ with_replica_pool(5, 'other_host', 5432) do |replica_pool|
+ expect(replica_pool.db_config.host).to eq('other_host')
+ expect(replica_pool.db_config.configuration_hash[:port]).to eq(5432)
end
end
- context 'when any of the replicas is caught up' do
- before do
- # `allow` for non-caught up host, because we may not even check it, if will find the caught up one earlier
- allow(hosts[0]).to receive(:caught_up?).with(location).and_return(false)
- expect(hosts[1]).to receive(:caught_up?).with(location).and_return(true)
- end
+ it 'does not modify connection class pool' do
+ expect { with_replica_pool(5) { } }.not_to change { ActiveRecord::Base.connection_pool }
+ end
- it 'returns true and sets host thread-local variable' do
- expect(subject).to be true
- expect(set_host).to eq(hosts[1])
+ def with_replica_pool(*args)
+ pool = lb.create_replica_connection_pool(*args)
+ yield pool
+ ensure
+ pool&.disconnect!
+ end
+ end
+
+ describe '#disconnect!' do
+ it 'calls disconnect on all hosts with a timeout', :skip_disconnect do
+ expect_next_instances_of(Gitlab::Database::LoadBalancing::Host, 2) do |host|
+ expect(host).to receive(:disconnect!).with(timeout: 30)
end
+
+ lb.disconnect!(timeout: 30)
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
index 9381ffa59fe..ea0c7f781fd 100644
--- a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
@@ -183,18 +183,17 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
describe '#clear' do
it 'clears the currently used host and session' do
lb = double(:lb)
- session = double(:session)
+ session = spy(:session)
allow(middleware).to receive(:load_balancer).and_return(lb)
expect(lb).to receive(:release_host)
- stub_const('Gitlab::Database::LoadBalancing::RackMiddleware::Session',
- session)
-
- expect(session).to receive(:clear_session)
+ stub_const('Gitlab::Database::LoadBalancing::Session', session)
middleware.clear
+
+ expect(session).to have_received(:clear_session)
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
index 7fc7b5e8d11..a27341a3324 100644
--- a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
@@ -3,8 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
+ let(:load_balancer) { Gitlab::Database::LoadBalancing::LoadBalancer.new([]) }
let(:service) do
- described_class.new(nameserver: 'localhost', port: 8600, record: 'foo')
+ described_class.new(
+ nameserver: 'localhost',
+ port: 8600,
+ record: 'foo',
+ load_balancer: load_balancer
+ )
end
before do
@@ -18,7 +24,15 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
describe '#initialize' do
describe ':record_type' do
- subject { described_class.new(nameserver: 'localhost', port: 8600, record: 'foo', record_type: record_type) }
+ subject do
+ described_class.new(
+ nameserver: 'localhost',
+ port: 8600,
+ record: 'foo',
+ record_type: record_type,
+ load_balancer: load_balancer
+ )
+ end
context 'with a supported type' do
let(:record_type) { 'SRV' }
@@ -44,21 +58,17 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
end
it 'starts service discovery in a new thread' do
- expect(service)
- .to receive(:refresh_if_necessary)
- .and_return(5)
-
- expect(service)
- .to receive(:rand)
- .and_return(2)
+ expect(Thread).to receive(:new).ordered.and_call_original # Thread starts
- expect(service)
- .to receive(:sleep)
- .with(7)
+ expect(service).to receive(:perform_service_discovery).ordered.and_return(5)
+ expect(service).to receive(:rand).ordered.and_return(2)
+ expect(service).to receive(:sleep).ordered.with(7) # Sleep runs after thread starts
service.start.join
end
+ end
+ describe '#perform_service_discovery' do
it 'reports exceptions to Sentry' do
error = StandardError.new
@@ -70,15 +80,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
.to receive(:track_exception)
.with(error)
- expect(service)
- .to receive(:rand)
- .and_return(2)
-
- expect(service)
- .to receive(:sleep)
- .with(62)
-
- service.start.join
+ service.perform_service_discovery
end
end
@@ -155,14 +157,23 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
expect(host)
.to receive(:disconnect!)
- .with(2)
+ .with(timeout: 2)
service.replace_hosts([address_bar])
end
end
describe '#addresses_from_dns' do
- let(:service) { described_class.new(nameserver: 'localhost', port: 8600, record: 'foo', record_type: record_type) }
+ let(:service) do
+ described_class.new(
+ nameserver: 'localhost',
+ port: 8600,
+ record: 'foo',
+ record_type: record_type,
+ load_balancer: load_balancer
+ )
+ end
+
let(:packet) { double(:packet, answer: [res1, res2]) }
before do
@@ -234,13 +245,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
end
describe '#addresses_from_load_balancer' do
- it 'returns the ordered host names of the load balancer' do
- load_balancer = Gitlab::Database::LoadBalancing::LoadBalancer.new(%w[b a])
-
- allow(service)
- .to receive(:load_balancer)
- .and_return(load_balancer)
+ let(:load_balancer) do
+ Gitlab::Database::LoadBalancing::LoadBalancer.new(%w[b a])
+ end
+ it 'returns the ordered host names of the load balancer' do
addresses = [
described_class::Address.new('a'),
described_class::Address.new('b')
diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
index 53445d73756..cf52e59db3a 100644
--- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
@@ -237,7 +237,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
context 'when write location is nil' do
before do
- allow(Gitlab::Database).to receive(:get_write_location).and_return(nil)
+ allow(Gitlab::Database.main).to receive(:get_write_location).and_return(nil)
end
it 'does not update the write location' do
@@ -313,7 +313,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
end
it 'returns false and does not try to find caught up hosts' do
- expect(described_class).not_to receive(:select_caught_up_hosts)
+ expect(lb).not_to receive(:select_up_to_date_host)
expect(described_class.select_caught_up_replicas(:project, 42)).to be false
end
end
@@ -329,18 +329,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
expect(described_class).to receive(:unstick).with(:project, 42)
expect(described_class.select_caught_up_replicas(:project, 42)).to be true
end
-
- context 'when :load_balancing_refine_load_balancer_methods FF is disabled' do
- before do
- stub_feature_flags(load_balancing_refine_load_balancer_methods: false)
- end
-
- it 'returns true, selects hosts, and unsticks if any secondary has caught up' do
- expect(lb).to receive(:select_caught_up_hosts).and_return(true)
- expect(described_class).to receive(:unstick).with(:project, 42)
- expect(described_class.select_caught_up_replicas(:project, 42)).to be true
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb
index 94717a10492..6ec8e0516f6 100644
--- a/spec/lib/gitlab/database/load_balancing_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing_spec.rb
@@ -3,25 +3,28 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing do
- include_context 'clear DB Load Balancing configuration'
+ describe '.proxy' do
+ before do
+ @previous_proxy = ActiveRecord::Base.load_balancing_proxy
- before do
- stub_env('ENABLE_LOAD_BALANCING_FOR_FOSS', 'true')
- end
+ ActiveRecord::Base.load_balancing_proxy = connection_proxy
+ end
+
+ after do
+ ActiveRecord::Base.load_balancing_proxy = @previous_proxy
+ end
- describe '.proxy' do
context 'when configured' do
- before do
- allow(ActiveRecord::Base.singleton_class).to receive(:prepend)
- subject.configure_proxy
- end
+ let(:connection_proxy) { double(:connection_proxy) }
it 'returns the connection proxy' do
- expect(subject.proxy).to be_an_instance_of(subject::ConnectionProxy)
+ expect(subject.proxy).to eq(connection_proxy)
end
end
context 'when not configured' do
+ let(:connection_proxy) { nil }
+
it 'returns nil' do
expect(subject.proxy).to be_nil
end
@@ -40,9 +43,9 @@ RSpec.describe Gitlab::Database::LoadBalancing do
it 'returns a Hash' do
lb_config = { 'hosts' => %w(foo) }
- original_db_config = Gitlab::Database.config
+ original_db_config = Gitlab::Database.main.config
modified_db_config = original_db_config.merge(load_balancing: lb_config)
- expect(Gitlab::Database).to receive(:config).and_return(modified_db_config)
+ expect(Gitlab::Database.main).to receive(:config).and_return(modified_db_config)
expect(described_class.configuration).to eq(lb_config)
end
@@ -132,7 +135,6 @@ RSpec.describe Gitlab::Database::LoadBalancing do
describe '.enable?' do
before do
- clear_load_balancing_configuration
allow(described_class).to receive(:hosts).and_return(%w(foo))
end
@@ -173,10 +175,6 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
describe '.configured?' do
- before do
- clear_load_balancing_configuration
- end
-
it 'returns true when Sidekiq is being used' do
allow(described_class).to receive(:hosts).and_return(%w(foo))
allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
@@ -207,12 +205,27 @@ RSpec.describe Gitlab::Database::LoadBalancing do
describe '.configure_proxy' do
it 'configures the connection proxy' do
- allow(ActiveRecord::Base.singleton_class).to receive(:prepend)
+ allow(ActiveRecord::Base).to receive(:load_balancing_proxy=)
described_class.configure_proxy
- expect(ActiveRecord::Base.singleton_class).to have_received(:prepend)
- .with(Gitlab::Database::LoadBalancing::ActiveRecordProxy)
+ expect(ActiveRecord::Base).to have_received(:load_balancing_proxy=)
+ .with(Gitlab::Database::LoadBalancing::ConnectionProxy)
+ end
+
+ context 'when service discovery is enabled' do
+ let(:service_discovery) { double(Gitlab::Database::LoadBalancing::ServiceDiscovery) }
+
+ it 'runs initial service discovery when configuring the connection proxy' do
+ allow(described_class)
+ .to receive(:configuration)
+ .and_return('discover' => { 'record' => 'foo' })
+
+ expect(Gitlab::Database::LoadBalancing::ServiceDiscovery).to receive(:new).and_return(service_discovery)
+ expect(service_discovery).to receive(:perform_service_discovery)
+
+ described_class.configure_proxy
+ end
end
end
@@ -298,59 +311,46 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
describe '.db_role_for_connection' do
- let(:connection) { double(:conneciton) }
-
context 'when the load balancing is not configured' do
- before do
- allow(described_class).to receive(:enable?).and_return(false)
- end
+ let(:connection) { ActiveRecord::Base.connection }
it 'returns primary' do
- expect(described_class.db_role_for_connection(connection)).to be(:primary)
+ expect(described_class.db_role_for_connection(connection)).to eq(:primary)
end
end
- context 'when the load balancing is configured' do
- let(:proxy) { described_class::ConnectionProxy.new(%w(foo)) }
- let(:load_balancer) { described_class::LoadBalancer.new(%w(foo)) }
-
- before do
- allow(ActiveRecord::Base.singleton_class).to receive(:prepend)
+ context 'when the NullPool is used for connection' do
+ let(:pool) { ActiveRecord::ConnectionAdapters::NullPool.new }
+ let(:connection) { double(:connection, pool: pool) }
- allow(described_class).to receive(:enable?).and_return(true)
- allow(described_class).to receive(:proxy).and_return(proxy)
- allow(proxy).to receive(:load_balancer).and_return(load_balancer)
-
- subject.configure_proxy(proxy)
+ it 'returns unknown' do
+ expect(described_class.db_role_for_connection(connection)).to eq(:unknown)
end
+ end
- context 'when the load balancer returns :replica' do
- it 'returns :replica' do
- allow(load_balancer).to receive(:db_role_for_connection).and_return(:replica)
-
- expect(described_class.db_role_for_connection(connection)).to be(:replica)
+ context 'when the load balancing is configured' do
+ let(:db_host) { ActiveRecord::Base.connection_pool.db_config.host }
+ let(:proxy) { described_class::ConnectionProxy.new([db_host]) }
- expect(load_balancer).to have_received(:db_role_for_connection).with(connection)
+ context 'when a proxy connection is used' do
+ it 'returns :unknown' do
+ expect(described_class.db_role_for_connection(proxy)).to eq(:unknown)
end
end
- context 'when the load balancer returns :primary' do
- it 'returns :primary' do
- allow(load_balancer).to receive(:db_role_for_connection).and_return(:primary)
-
- expect(described_class.db_role_for_connection(connection)).to be(:primary)
-
- expect(load_balancer).to have_received(:db_role_for_connection).with(connection)
+ context 'when a read connection is used' do
+ it 'returns :replica' do
+ proxy.load_balancer.read do |connection|
+ expect(described_class.db_role_for_connection(connection)).to eq(:replica)
+ end
end
end
- context 'when the load balancer returns nil' do
- it 'returns nil' do
- allow(load_balancer).to receive(:db_role_for_connection).and_return(nil)
-
- expect(described_class.db_role_for_connection(connection)).to be(nil)
-
- expect(load_balancer).to have_received(:db_role_for_connection).with(connection)
+ context 'when a read_write connection is used' do
+ it 'returns :primary' do
+ proxy.load_balancer.read_write do |connection|
+ expect(described_class.db_role_for_connection(connection)).to eq(:primary)
+ end
end
end
end
@@ -366,7 +366,7 @@ RSpec.describe Gitlab::Database::LoadBalancing do
# - In each test, we listen to the SQL queries (via sql.active_record
# instrumentation) while triggering real queries from the defined model.
# - We assert the desinations (replica/primary) of the queries in order.
- describe 'LoadBalancing integration tests', :delete do
+ describe 'LoadBalancing integration tests', :db_load_balancing, :delete do
before(:all) do
ActiveRecord::Schema.define do
create_table :load_balancing_test, force: true do |t|
@@ -381,30 +381,14 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
end
- shared_context 'LoadBalancing setup' do
- let(:development_db_config) { ActiveRecord::Base.configurations.configs_for(env_name: 'development').first.configuration_hash }
- let(:hosts) { [development_db_config[:host]] }
- let(:model) do
- Class.new(ApplicationRecord) do
- self.table_name = "load_balancing_test"
- end
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = "load_balancing_test"
end
+ end
- before do
- # Preloading testing class
- model.singleton_class.prepend ::Gitlab::Database::LoadBalancing::ActiveRecordProxy
-
- # Setup load balancing
- clear_load_balancing_configuration
- allow(ActiveRecord::Base.singleton_class).to receive(:prepend)
- subject.configure_proxy(::Gitlab::Database::LoadBalancing::ConnectionProxy.new(hosts))
-
- original_db_config = Gitlab::Database.config
- modified_db_config = original_db_config.merge(load_balancing: { hosts: hosts })
- allow(Gitlab::Database).to receive(:config).and_return(modified_db_config)
-
- ::Gitlab::Database::LoadBalancing::Session.clear_session
- end
+ before do
+ model.singleton_class.prepend ::Gitlab::Database::LoadBalancing::ActiveRecordProxy
end
where(:queries, :include_transaction, :expected_results) do
@@ -715,8 +699,6 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
with_them do
- include_context 'LoadBalancing setup'
-
it 'redirects queries to the right roles' do
roles = []
@@ -785,8 +767,6 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
with_them do
- include_context 'LoadBalancing setup'
-
it 'redirects queries to the right roles' do
roles = []
@@ -805,8 +785,6 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
context 'a write inside a transaction inside fallback_to_replicas_for_ambiguous_queries block' do
- include_context 'LoadBalancing setup'
-
it 'raises an exception' do
expect do
::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 8e25f9249fe..9f9aef77de7 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -278,6 +278,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.add_concurrent_index(:users, :foo, unique: true)
end
+
+ it 'unprepares the async index creation' do
+ expect(model).to receive(:add_index)
+ .with(:users, :foo, algorithm: :concurrently)
+
+ expect(model).to receive(:unprepare_async_index)
+ .with(:users, :foo, algorithm: :concurrently)
+
+ model.add_concurrent_index(:users, :foo)
+ end
end
context 'inside a transaction' do
@@ -314,6 +324,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.remove_concurrent_index(:users, :foo, unique: true)
end
+ it 'unprepares the async index creation' do
+ expect(model).to receive(:remove_index)
+ .with(:users, { algorithm: :concurrently, column: :foo })
+
+ expect(model).to receive(:unprepare_async_index)
+ .with(:users, :foo, { algorithm: :concurrently })
+
+ model.remove_concurrent_index(:users, :foo)
+ end
+
describe 'by index name' do
before do
allow(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(true)
@@ -345,6 +365,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.remove_concurrent_index_by_name(:users, wrong_key: "index_x_by_y")
end.to raise_error 'remove_concurrent_index_by_name must get an index name as the second argument'
end
+
+ it 'unprepares the async index creation' do
+ expect(model).to receive(:remove_index)
+ .with(:users, { algorithm: :concurrently, name: "index_x_by_y" })
+
+ expect(model).to receive(:unprepare_async_index_by_name)
+ .with(:users, "index_x_by_y", { algorithm: :concurrently })
+
+ model.remove_concurrent_index_by_name(:users, "index_x_by_y")
+ end
end
end
end
@@ -384,9 +414,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
expect(model).to receive(:execute).with(/REFERENCES users \(id\)/)
@@ -398,9 +428,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
expect(model).to receive(:execute).with(/REFERENCES users \(id_convert_to_bigint\)/)
@@ -416,9 +446,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
expect(model).to receive(:execute).with(/ON DELETE SET NULL/)
@@ -433,9 +463,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
expect(model).to receive(:execute).with(/ON DELETE CASCADE/)
@@ -450,9 +480,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
expect(model).not_to receive(:execute).with(/ON DELETE/)
@@ -468,10 +498,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
@@ -497,10 +527,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+foo/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo)
end
@@ -527,10 +557,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+bar/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :bar)
end
@@ -556,6 +586,22 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it_behaves_like 'performs validation', {}
end
end
+
+ context 'when the reverse_lock_order flag is set' do
+ it 'explicitly locks the tables in target-source order', :aggregate_failures do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
+
+ expect(model).to receive(:execute).with('LOCK TABLE users, projects IN SHARE ROW EXCLUSIVE MODE')
+ expect(model).to receive(:execute).with(/REFERENCES users \(id\)/)
+
+ model.add_concurrent_foreign_key(:projects, :users, column: :user_id, reverse_lock_order: true)
+ end
+ end
end
end
@@ -568,9 +614,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).not_to receive(:concurrent_foreign_key_name)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/ALTER TABLE projects VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
end
model.validate_foreign_key(:projects, :user_id, name: :foo)
@@ -585,9 +631,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:concurrent_foreign_key_name)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/ALTER TABLE projects VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
end
model.validate_foreign_key(:projects, :user_id)
@@ -702,7 +748,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
after do
- model.execute('RESET ALL')
+ model.execute('RESET statement_timeout')
end
it 'defines statement to 0 only for current transaction' do
@@ -719,7 +765,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'when passing a blocks' do
it 'disables statement timeouts on session level and executes the block' do
expect(model).to receive(:execute).with('SET statement_timeout TO 0')
- expect(model).to receive(:execute).with('RESET ALL').at_least(:once)
+ expect(model).to receive(:execute).with('RESET statement_timeout').at_least(:once)
expect { |block| model.disable_statement_timeout(&block) }.to yield_control
end
@@ -731,7 +777,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
after do
- model.execute('RESET ALL')
+ model.execute('RESET statement_timeout')
end
it 'defines statement to 0 for any code run inside the block' do
@@ -758,12 +804,12 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
after do
# Use ActiveRecord::Base.connection instead of model.execute
# so that this call is not counted below
- ActiveRecord::Base.connection.execute('RESET ALL')
+ ActiveRecord::Base.connection.execute('RESET statement_timeout')
end
it 'yields control without disabling the timeout or resetting' do
expect(model).not_to receive(:execute).with('SET statement_timeout TO 0')
- expect(model).not_to receive(:execute).with('RESET ALL')
+ expect(model).not_to receive(:execute).with('RESET statement_timeout')
expect { |block| model.disable_statement_timeout(&block) }.to yield_control
end
@@ -2486,7 +2532,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
@@ -2496,7 +2542,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
.and_return(true).exactly(1)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.add_check_constraint(
:test_table,
@@ -2530,7 +2576,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
@@ -2539,7 +2585,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
.and_return(true).exactly(1)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.add_check_constraint(
:test_table,
@@ -2572,9 +2618,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:check_constraint_exists?).and_return(true)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(validate_sql)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.validate_check_constraint(:test_table, 'check_name')
end
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index e096e7f6e91..1a7116e75e5 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -581,4 +581,101 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
model.delete_queued_jobs('BackgroundMigrationClassName')
end
end
+
+ describe '#finalized_background_migration' do
+ include_context 'background migration job class'
+
+ let!(:tracked_pending_job) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [1]) }
+ let!(:tracked_successful_job) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [2]) }
+
+ before do
+ Sidekiq::Testing.disable! do
+ BackgroundMigrationWorker.perform_async(job_class_name, [1, 2])
+ BackgroundMigrationWorker.perform_async(job_class_name, [3, 4])
+ BackgroundMigrationWorker.perform_in(10, job_class_name, [5, 6])
+ BackgroundMigrationWorker.perform_in(20, job_class_name, [7, 8])
+ end
+ end
+
+ it_behaves_like 'finalized tracked background migration' do
+ before do
+ model.finalize_background_migration(job_class_name)
+ end
+ end
+
+ context 'when removing all tracked job records' do
+ # Force pending jobs to remain pending.
+ let!(:job_perform_method) { ->(*arguments) { } }
+
+ before do
+ model.finalize_background_migration(job_class_name, delete_tracking_jobs: %w[pending succeeded])
+ end
+
+ it_behaves_like 'finalized tracked background migration'
+ it_behaves_like 'removed tracked jobs', 'pending'
+ it_behaves_like 'removed tracked jobs', 'succeeded'
+ end
+
+ context 'when retaining all tracked job records' do
+ before do
+ model.finalize_background_migration(job_class_name, delete_tracking_jobs: false)
+ end
+
+ it_behaves_like 'finalized background migration'
+ include_examples 'retained tracked jobs', 'succeeded'
+ end
+
+ context 'during retry race condition' do
+ let(:queue_items_added) { [] }
+ let!(:job_perform_method) do
+ ->(*arguments) do
+ Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
+ RSpec.current_example.example_group_instance.job_class_name,
+ arguments
+ )
+
+ # Mock another process pushing queue jobs.
+ queue_items_added = RSpec.current_example.example_group_instance.queue_items_added
+ if queue_items_added.count < 10
+ Sidekiq::Testing.disable! do
+ job_class_name = RSpec.current_example.example_group_instance.job_class_name
+ queue_items_added << BackgroundMigrationWorker.perform_async(job_class_name, [Time.current])
+ queue_items_added << BackgroundMigrationWorker.perform_in(10, job_class_name, [Time.current])
+ end
+ end
+ end
+ end
+
+ it_behaves_like 'finalized tracked background migration' do
+ before do
+ model.finalize_background_migration(job_class_name, delete_tracking_jobs: ['succeeded'])
+ end
+ end
+ end
+ end
+
+ describe '#delete_job_tracking' do
+ let!(:job_class_name) { 'TestJob' }
+
+ let!(:tracked_pending_job) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [1]) }
+ let!(:tracked_successful_job) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [2]) }
+
+ context 'with default status' do
+ before do
+ model.delete_job_tracking(job_class_name)
+ end
+
+ include_examples 'retained tracked jobs', 'pending'
+ include_examples 'removed tracked jobs', 'succeeded'
+ end
+
+ context 'with explicit status' do
+ before do
+ model.delete_job_tracking(job_class_name, status: %w[pending succeeded])
+ end
+
+ include_examples 'removed tracked jobs', 'pending'
+ include_examples 'removed tracked jobs', 'succeeded'
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
index 6d047eed3bb..5945e5a2039 100644
--- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
+++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
@@ -5,24 +5,35 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
describe '#observe' do
subject { described_class.new }
- let(:migration) { 1234 }
+ let(:migration_name) { 'test' }
+ let(:migration_version) { '12345' }
it 'executes the given block' do
- expect { |b| subject.observe(migration, &b) }.to yield_control
+ expect { |b| subject.observe(version: migration_version, name: migration_name, &b) }.to yield_control
end
context 'behavior with observers' do
- subject { described_class.new(observers).observe(migration) {} }
+ subject { described_class.new([Gitlab::Database::Migrations::Observers::MigrationObserver]).observe(version: migration_version, name: migration_name) {} }
- let(:observers) { [observer] }
let(:observer) { instance_double('Gitlab::Database::Migrations::Observers::MigrationObserver', before: nil, after: nil, record: nil) }
+ before do
+ allow(Gitlab::Database::Migrations::Observers::MigrationObserver).to receive(:new).and_return(observer)
+ end
+
+ it 'instantiates observer with observation' do
+ expect(Gitlab::Database::Migrations::Observers::MigrationObserver)
+ .to receive(:new)
+ .with(instance_of(Gitlab::Database::Migrations::Observation)) { |observation| expect(observation.version).to eq(migration_version) }
+ .and_return(observer)
+
+ subject
+ end
+
it 'calls #before, #after, #record on given observers' do
expect(observer).to receive(:before).ordered
expect(observer).to receive(:after).ordered
- expect(observer).to receive(:record).ordered do |observation|
- expect(observation.migration).to eq(migration)
- end
+ expect(observer).to receive(:record).ordered
subject
end
@@ -47,7 +58,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
end
context 'on successful execution' do
- subject { described_class.new.observe(migration) {} }
+ subject { described_class.new.observe(version: migration_version, name: migration_name) {} }
it 'records walltime' do
expect(subject.walltime).not_to be_nil
@@ -58,12 +69,16 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
end
it 'records the migration version' do
- expect(subject.migration).to eq(migration)
+ expect(subject.version).to eq(migration_version)
+ end
+
+ it 'records the migration name' do
+ expect(subject.name).to eq(migration_name)
end
end
context 'upon failure' do
- subject { described_class.new.observe(migration) { raise 'something went wrong' } }
+ subject { described_class.new.observe(version: migration_version, name: migration_name) { raise 'something went wrong' } }
it 'raises the exception' do
expect { subject }.to raise_error(/something went wrong/)
@@ -73,7 +88,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
subject { instance.observations.first }
before do
- instance.observe(migration) { raise 'something went wrong' }
+ instance.observe(version: migration_version, name: migration_name) { raise 'something went wrong' }
rescue StandardError
# ignore
end
@@ -89,7 +104,11 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
end
it 'records the migration version' do
- expect(subject.migration).to eq(migration)
+ expect(subject.version).to eq(migration_version)
+ end
+
+ it 'records the migration name' do
+ expect(subject.name).to eq(migration_name)
end
end
end
@@ -101,8 +120,8 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
let(:migration2) { double('migration2', call: nil) }
it 'records observations for all migrations' do
- subject.observe('migration1') {}
- subject.observe('migration2') { raise 'something went wrong' } rescue nil
+ subject.observe(version: migration_version, name: migration_name) {}
+ subject.observe(version: migration_version, name: migration_name) { raise 'something went wrong' } rescue nil
expect(subject.observations.size).to eq(2)
end
diff --git a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
index 8aac3ed67c6..36885a1594f 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
@@ -2,16 +2,17 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::QueryDetails do
- subject { described_class.new }
+ subject { described_class.new(observation) }
- let(:observation) { Gitlab::Database::Migrations::Observation.new(migration) }
+ let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) }
let(:connection) { ActiveRecord::Base.connection }
let(:query) { "select date_trunc('day', $1::timestamptz) + $2 * (interval '1 hour')" }
let(:query_binds) { [Time.current, 3] }
let(:directory_path) { Dir.mktmpdir }
- let(:log_file) { "#{directory_path}/#{migration}-query-details.json" }
+ let(:log_file) { "#{directory_path}/#{migration_version}_#{migration_name}-query-details.json" }
let(:query_details) { Gitlab::Json.parse(File.read(log_file)) }
- let(:migration) { 20210422152437 }
+ let(:migration_version) { 20210422152437 }
+ let(:migration_name) { 'test' }
before do
stub_const('Gitlab::Database::Migrations::Instrumentation::RESULT_DIR', directory_path)
@@ -49,7 +50,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryDetails do
subject.before
run_query
subject.after
- subject.record(observation)
+ subject.record
end
def run_query
diff --git a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
index 195e7114582..2a49d8e8b73 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
@@ -2,14 +2,14 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do
- subject { described_class.new }
+ subject { described_class.new(observation) }
- let(:observation) { Gitlab::Database::Migrations::Observation.new(migration) }
+ let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) }
let(:connection) { ActiveRecord::Base.connection }
let(:query) { 'select 1' }
let(:directory_path) { Dir.mktmpdir }
- let(:log_file) { "#{directory_path}/current.log" }
- let(:migration) { 20210422152437 }
+ let(:migration_version) { 20210422152437 }
+ let(:migration_name) { 'test' }
before do
stub_const('Gitlab::Database::Migrations::Instrumentation::RESULT_DIR', directory_path)
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do
it 'writes a file with the query log' do
observe
- expect(File.read("#{directory_path}/#{migration}.log")).to include(query)
+ expect(File.read("#{directory_path}/#{migration_version}_#{migration_name}.log")).to include(query)
end
it 'does not change the default logger' do
@@ -33,6 +33,6 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do
subject.before
connection.execute(query)
subject.after
- subject.record(observation)
+ subject.record
end
end
diff --git a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
index a3b03050b33..32a25fdaa28 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
@@ -2,8 +2,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::QueryStatistics do
- subject { described_class.new }
+ subject { described_class.new(observation) }
+ let(:observation) { Gitlab::Database::Migrations::Observation.new }
let(:connection) { ActiveRecord::Base.connection }
def mock_pgss(enabled: true)
@@ -37,7 +38,6 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryStatistics do
end
describe '#record' do
- let(:observation) { Gitlab::Database::Migrations::Observation.new }
let(:result) { double }
let(:pgss_query) do
<<~SQL
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryStatistics do
mock_pgss(enabled: true)
expect(connection).to receive(:execute).with(pgss_query).once.and_return(result)
- expect { subject.record(observation) }.to change { observation.query_statistics }.from(nil).to(result)
+ expect { subject.record }.to change { observation.query_statistics }.from(nil).to(result)
end
end
@@ -61,7 +61,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryStatistics do
mock_pgss(enabled: false)
expect(connection).not_to receive(:execute)
- expect { subject.record(observation) }.not_to change { observation.query_statistics }
+ expect { subject.record }.not_to change { observation.query_statistics }
end
end
end
diff --git a/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb b/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb
index 73466471944..61e28003e66 100644
--- a/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::TotalDatabaseSizeChange do
- subject { described_class.new }
+ subject { described_class.new(observation) }
let(:observation) { Gitlab::Database::Migrations::Observation.new }
let(:connection) { ActiveRecord::Base.connection }
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::TotalDatabaseSizeChange
subject.before
subject.after
- subject.record(observation)
+ subject.record
expect(observation.total_database_size_change).to eq(256 - 1024)
end
@@ -27,13 +27,13 @@ RSpec.describe Gitlab::Database::Migrations::Observers::TotalDatabaseSizeChange
it 'does not record anything if before size is unknown' do
subject.after
- expect { subject.record(observation) }.not_to change { observation.total_database_size_change }
+ expect { subject.record }.not_to change { observation.total_database_size_change }
end
it 'does not record anything if after size is unknown' do
subject.before
- expect { subject.record(observation) }.not_to change { observation.total_database_size_change }
+ expect { subject.record }.not_to change { observation.total_database_size_change }
end
end
end
diff --git a/spec/lib/gitlab/database/multi_threaded_migration_spec.rb b/spec/lib/gitlab/database/multi_threaded_migration_spec.rb
deleted file mode 100644
index 78dd9e88064..00000000000
--- a/spec/lib/gitlab/database/multi_threaded_migration_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::MultiThreadedMigration do
- let(:migration) do
- Class.new { include Gitlab::Database::MultiThreadedMigration }.new
- end
-
- describe '#connection' do
- after do
- Thread.current[described_class::MULTI_THREAD_AR_CONNECTION] = nil
- end
-
- it 'returns the thread-local connection if present' do
- Thread.current[described_class::MULTI_THREAD_AR_CONNECTION] = 10
-
- expect(migration.connection).to eq(10)
- end
-
- it 'returns the global connection if no thread-local connection was set' do
- expect(migration.connection).to eq(ActiveRecord::Base.connection)
- end
- end
-
- describe '#with_multiple_threads' do
- it 'starts multiple threads and yields the supplied block in every thread' do
- output = Queue.new
-
- migration.with_multiple_threads(2) do
- output << migration.connection.execute('SELECT 1')
- end
-
- expect(output.size).to eq(2)
- end
-
- it 'joins the threads when the join parameter is set' do
- expect_any_instance_of(Thread).to receive(:join).and_call_original
-
- migration.with_multiple_threads(1) { }
- end
- end
-end
diff --git a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
new file mode 100644
index 00000000000..8523b7104f0
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
@@ -0,0 +1,181 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do
+ include Database::TableSchemaHelpers
+
+ let(:connection) { ActiveRecord::Base.connection }
+
+ def expect_partition_present(name)
+ aggregate_failures do
+ expect(table_oid(name)).not_to be_nil
+ expect(Postgresql::DetachedPartition.find_by(table_name: name)).not_to be_nil
+ end
+ end
+
+ def expect_partition_removed(name)
+ aggregate_failures do
+ expect(table_oid(name)).to be_nil
+ expect(Postgresql::DetachedPartition.find_by(table_name: name)).to be_nil
+ end
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE parent_table (
+ id bigserial not null,
+ created_at timestamptz not null,
+ primary key (id, created_at)
+ ) PARTITION BY RANGE(created_at)
+ SQL
+ end
+
+ def create_partition(name:, table: 'parent_table', from:, to:, attached:, drop_after:)
+ from = from.beginning_of_month
+ to = to.beginning_of_month
+ full_name = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{name}"
+ connection.execute(<<~SQL)
+ CREATE TABLE #{full_name}
+ PARTITION OF #{table}
+ FOR VALUES FROM ('#{from.strftime('%Y-%m-%d')}') TO ('#{to.strftime('%Y-%m-%d')}')
+ SQL
+
+ unless attached
+ connection.execute(<<~SQL)
+ ALTER TABLE #{table} DETACH PARTITION #{full_name}
+ SQL
+ end
+
+ Postgresql::DetachedPartition.create!(table_name: name,
+ drop_after: drop_after)
+ end
+
+ describe '#perform' do
+ context 'when the partition should not be dropped yet' do
+ it 'does not drop the partition' do
+ create_partition(name: 'test_partition',
+ from: 2.months.ago, to: 1.month.ago,
+ attached: false,
+ drop_after: 1.day.from_now)
+
+ subject.perform
+
+ expect_partition_present('test_partition')
+ end
+ end
+
+ context 'with a partition to drop' do
+ before do
+ create_partition(name: 'test_partition',
+ from: 2.months.ago,
+ to: 1.month.ago.beginning_of_month,
+ attached: false,
+ drop_after: 1.second.ago)
+ end
+
+ it 'drops the partition' do
+ subject.perform
+
+ expect(table_oid('test_partition')).to be_nil
+ end
+
+ context 'when the drop_detached_partitions feature flag is disabled' do
+ before do
+ stub_feature_flags(drop_detached_partitions: false)
+ end
+ it 'does not drop the partition' do
+ subject.perform
+
+ expect(table_oid('test_partition')).not_to be_nil
+ end
+ end
+
+ context 'when another process drops the table while the first waits for a lock' do
+ it 'skips the table' do
+ # Rspec's receive_method_chain does not support .and_wrap_original, so we need to nest here.
+ expect(Postgresql::DetachedPartition).to receive(:lock).and_wrap_original do |lock_meth|
+ locked = lock_meth.call
+ expect(locked).to receive(:find_by).and_wrap_original do |find_meth, *find_args|
+ # Another process drops the table then deletes this entry
+ Postgresql::DetachedPartition.where(*find_args).delete_all
+ find_meth.call(*find_args)
+ end
+
+ locked
+ end
+
+ expect(subject).not_to receive(:drop_one)
+
+ subject.perform
+ end
+ end
+ end
+
+ context 'when the partition to drop is still attached to its table' do
+ before do
+ create_partition(name: 'test_partition',
+ from: 2.months.ago,
+ to: 1.month.ago.beginning_of_month,
+ attached: true,
+ drop_after: 1.second.ago)
+ end
+
+ it 'does not drop the partition, but does remove the DetachedPartition entry' do
+ subject.perform
+ aggregate_failures do
+ expect(table_oid('test_partition')).not_to be_nil
+ expect(Postgresql::DetachedPartition.find_by(table_name: 'test_partition')).to be_nil
+ end
+ end
+
+ it 'removes the detached_partition entry' do
+ detached_partition = Postgresql::DetachedPartition.find_by!(table_name: 'test_partition')
+
+ subject.perform
+
+ expect(Postgresql::DetachedPartition.exists?(id: detached_partition.id)).to be_falsey
+ end
+ end
+
+ context 'with multiple partitions to drop' do
+ before do
+ create_partition(name: 'partition_1',
+ from: 3.months.ago,
+ to: 2.months.ago,
+ attached: false,
+ drop_after: 1.second.ago)
+
+ create_partition(name: 'partition_2',
+ from: 2.months.ago,
+ to: 1.month.ago,
+ attached: false,
+ drop_after: 1.second.ago)
+ end
+
+ it 'drops both partitions' do
+ subject.perform
+
+ expect_partition_removed('partition_1')
+ expect_partition_removed('partition_2')
+ end
+
+ context 'when the first drop returns an error' do
+ it 'still drops the second partition' do
+ expect(subject).to receive(:drop_one).ordered.and_raise('injected error')
+ expect(subject).to receive(:drop_one).ordered.and_call_original
+
+ subject.perform
+
+ # We don't know which partition we tried to drop first, so the tests here have to work with either one
+ expect(Postgresql::DetachedPartition.count).to eq(1)
+ errored_partition_name = Postgresql::DetachedPartition.first!.table_name
+
+ dropped_partition_name = (%w[partition_1 partition_2] - [errored_partition_name]).first
+ expect_partition_present(errored_partition_name)
+ expect_partition_removed(dropped_partition_name)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
index f9dca371398..c4fbf53d1c2 100644
--- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
@@ -237,16 +237,6 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
expect(subject).to contain_exactly(min_value_to_may)
end
-
- context 'when the feature flag is toggled off' do
- before do
- stub_feature_flags(partition_pruning_dry_run: false)
- end
-
- it 'is empty' do
- expect(subject).to eq([])
- end
- end
end
context 'with a time retention policy of 2 months' do
@@ -258,16 +248,6 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005')
)
end
-
- context 'when the feature flag is toggled off' do
- before do
- stub_feature_flags(partition_pruning_dry_run: false)
- end
-
- it 'is empty' do
- expect(subject).to eq([])
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
index 903a41d6dd2..3d60457c3a9 100644
--- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -4,9 +4,14 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
include Database::PartitioningHelpers
- include Database::TableSchemaHelpers
include ExclusiveLeaseHelpers
+ def has_partition(model, month)
+ Gitlab::Database::PostgresPartition.for_parent_table(model.table_name).any? do |partition|
+ Gitlab::Database::Partitioning::TimePartition.from_sql(model.table_name, partition.name, partition.condition).from == month
+ end
+ end
+
describe '.register' do
let(:model) { double(partitioning_strategy: nil) }
@@ -111,14 +116,14 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
let(:extra_partitions) do
[
- instance_double(Gitlab::Database::Partitioning::TimePartition, table: table, partition_name: 'foo1'),
- instance_double(Gitlab::Database::Partitioning::TimePartition, table: table, partition_name: 'foo2')
+ instance_double(Gitlab::Database::Partitioning::TimePartition, table: table, partition_name: 'foo1', to_detach_sql: 'SELECT 1'),
+ instance_double(Gitlab::Database::Partitioning::TimePartition, table: table, partition_name: 'foo2', to_detach_sql: 'SELECT 2')
]
end
- context 'with the partition_pruning_dry_run feature flag enabled' do
+ context 'with the partition_pruning feature flag enabled' do
before do
- stub_feature_flags(partition_pruning_dry_run: true)
+ stub_feature_flags(partition_pruning: true)
end
it 'detaches each extra partition' do
@@ -146,9 +151,9 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
end
end
- context 'with the partition_pruning_dry_run feature flag disabled' do
+ context 'with the partition_pruning feature flag disabled' do
before do
- stub_feature_flags(partition_pruning_dry_run: false)
+ stub_feature_flags(partition_pruning: false)
end
it 'returns immediately' do
@@ -158,4 +163,128 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
end
end
end
+
+ describe '#detach_partitions' do
+ around do |ex|
+ travel_to(Date.parse('2021-06-23')) do
+ ex.run
+ end
+ end
+
+ subject { described_class.new([my_model]).sync_partitions }
+
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:my_model) do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ self.table_name = 'my_model_example_table'
+
+ partitioned_by :created_at, strategy: :monthly, retain_for: 1.month
+ end
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE my_model_example_table
+ (id serial not null, created_at timestamptz not null, primary key (id, created_at))
+ PARTITION BY RANGE (created_at);
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.my_model_example_table_202104
+ PARTITION OF my_model_example_table
+ FOR VALUES FROM ('2021-04-01') TO ('2021-05-01');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.my_model_example_table_202105
+ PARTITION OF my_model_example_table
+ FOR VALUES FROM ('2021-05-01') TO ('2021-06-01');
+ SQL
+
+ # Also create all future partitions so that the sync is only trying to detach old partitions
+ my_model.partitioning_strategy.missing_partitions.each do |p|
+ connection.execute p.to_sql
+ end
+ end
+
+ def num_tables
+ connection.select_value(<<~SQL)
+ SELECT COUNT(*)
+ FROM pg_class
+ where relkind IN ('r', 'p')
+ SQL
+ end
+
+ it 'detaches exactly one partition' do
+ expect { subject }.to change { find_partitions(my_model.table_name, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA).size }.from(9).to(8)
+ end
+
+ it 'detaches the old partition' do
+ expect { subject }.to change { has_partition(my_model, 2.months.ago.beginning_of_month) }.from(true).to(false)
+ end
+
+ it 'deletes zero tables' do
+ expect { subject }.not_to change { num_tables }
+ end
+
+ it 'creates the appropriate PendingPartitionDrop entry' do
+ subject
+
+ pending_drop = Postgresql::DetachedPartition.find_by!(table_name: 'my_model_example_table_202104')
+ expect(pending_drop.drop_after).to eq(Time.current + described_class::RETAIN_DETACHED_PARTITIONS_FOR)
+ end
+
+ # Postgres 11 does not support foreign keys to partitioned tables
+ if Gitlab::Database.main.version.to_f >= 12
+ context 'when the model is the target of a foreign key' do
+ before do
+ connection.execute(<<~SQL)
+ create unique index idx_for_fk ON my_model_example_table(created_at);
+
+ create table referencing_table (
+ id bigserial primary key not null,
+ referencing_created_at timestamptz references my_model_example_table(created_at)
+ );
+ SQL
+ end
+
+ it 'does not detach partitions with a referenced foreign key' do
+ expect { subject }.not_to change { find_partitions(my_model.table_name).size }
+ end
+ end
+ end
+ end
+
+ context 'creating and then detaching partitions for a table' do
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:my_model) do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ self.table_name = 'my_model_example_table'
+
+ partitioned_by :created_at, strategy: :monthly, retain_for: 1.month
+ end
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE my_model_example_table
+ (id serial not null, created_at timestamptz not null, primary key (id, created_at))
+ PARTITION BY RANGE (created_at);
+ SQL
+ end
+
+ def num_partitions(model)
+ find_partitions(model.table_name, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA).size
+ end
+
+ it 'creates partitions for the future then drops the oldest one after a month' do
+ # 1 month for the current month, 1 month for the old month that we're retaining data for, headroom
+ expected_num_partitions = (Gitlab::Database::Partitioning::MonthlyStrategy::HEADROOM + 2.months) / 1.month
+ expect { described_class.new([my_model]).sync_partitions }.to change { num_partitions(my_model) }.from(0).to(expected_num_partitions)
+
+ travel 1.month
+
+ expect { described_class.new([my_model]).sync_partitions }.to change { has_partition(my_model, 2.months.ago.beginning_of_month) }.from(true).to(false).and(change { num_partitions(my_model) }.by(0))
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb b/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb
index 67596211f71..7024cbd55ff 100644
--- a/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionMonitoring do
let(:models) { [model] }
let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table) }
- let(:partitioning_strategy) { double(missing_partitions: missing_partitions, current_partitions: current_partitions) }
+ let(:partitioning_strategy) { double(missing_partitions: missing_partitions, current_partitions: current_partitions, extra_partitions: extra_partitions) }
let(:table) { "some_table" }
let(:missing_partitions) do
@@ -19,6 +19,10 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionMonitoring do
[double, double]
end
+ let(:extra_partitions) do
+ [double, double, double]
+ end
+
it 'reports number of present partitions' do
subject
@@ -30,5 +34,11 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionMonitoring do
expect(Gitlab::Metrics.registry.get(:db_partitions_missing).get({ table: table })).to eq(missing_partitions.size)
end
+
+ it 'reports number of extra partitions' do
+ subject
+
+ expect(Gitlab::Metrics.registry.get(:db_partitions_extra).get({ table: table })).to eq(extra_partitions.size)
+ end
end
end
diff --git a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb
new file mode 100644
index 00000000000..ec39e5bfee7
--- /dev/null
+++ b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model do
+ # PostgresForeignKey does not use `it_behaves_like 'a postgres model'` because it does not correspond 1-1 with a single entry
+ # in pg_class
+
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE public.referenced_table (
+ id bigserial primary key not null
+ );
+
+ CREATE TABLE public.other_referenced_table (
+ id bigserial primary key not null
+ );
+
+ CREATE TABLE public.constrained_table (
+ id bigserial primary key not null,
+ referenced_table_id bigint not null,
+ other_referenced_table_id bigint not null,
+ CONSTRAINT fk_constrained_to_referenced FOREIGN KEY(referenced_table_id) REFERENCES referenced_table(id),
+ CONSTRAINT fk_constrained_to_other_referenced FOREIGN KEY(other_referenced_table_id)
+ REFERENCES other_referenced_table(id)
+ );
+ SQL
+ end
+
+ describe '#by_referenced_table_identifier' do
+ it 'throws an error when the identifier name is not fully qualified' do
+ expect { described_class.by_referenced_table_identifier('referenced_table') }.to raise_error(ArgumentError, /not fully qualified/)
+ end
+
+ it 'finds the foreign keys for the referenced table' do
+ expected = described_class.find_by!(name: 'fk_constrained_to_referenced')
+
+ expect(described_class.by_referenced_table_identifier('public.referenced_table')).to contain_exactly(expected)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb
index e1832219ebf..9088719d5a4 100644
--- a/spec/lib/gitlab/database/postgres_index_spec.rb
+++ b/spec/lib/gitlab/database/postgres_index_spec.rb
@@ -40,6 +40,37 @@ RSpec.describe Gitlab::Database::PostgresIndex do
expect(types & %w(btree gist)).to eq(types)
end
+
+ context 'with leftover indexes' do
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE INDEX foobar_ccnew ON users (id);
+ CREATE INDEX foobar_ccnew1 ON users (id);
+ SQL
+ end
+
+ subject { described_class.reindexing_support.map(&:name) }
+
+ it 'excludes temporary indexes from reindexing' do
+ expect(subject).not_to include('foobar_ccnew')
+ expect(subject).not_to include('foobar_ccnew1')
+ end
+ end
+ end
+
+ describe '.reindexing_leftovers' do
+ subject { described_class.reindexing_leftovers }
+
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE INDEX foobar_ccnew ON users (id);
+ CREATE INDEX foobar_ccnew1 ON users (id);
+ SQL
+ end
+
+ it 'retrieves leftover indexes matching the /_ccnew[0-9]*$/ pattern' do
+ expect(subject.map(&:name)).to eq(%w(foobar_ccnew foobar_ccnew1))
+ end
end
describe '.not_match' do
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index 8aff99544ca..550f9db2b5b 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -26,14 +26,31 @@ RSpec.describe Gitlab::Database::Reindexing do
end
end
- describe '.candidate_indexes' do
- subject { described_class.candidate_indexes }
+ describe '.cleanup_leftovers!' do
+ subject { described_class.cleanup_leftovers! }
+
+ before do
+ ApplicationRecord.connection.execute(<<~SQL)
+ CREATE INDEX foobar_ccnew ON users (id);
+ CREATE INDEX foobar_ccnew1 ON users (id);
+ SQL
+ end
- it 'retrieves regular indexes that are no left-overs from previous runs' do
- result = double
- expect(Gitlab::Database::PostgresIndex).to receive_message_chain('not_match.reindexing_support').with('\_ccnew[0-9]*$').with(no_args).and_return(result)
+ it 'drops both leftover indexes' do
+ expect_query("SET lock_timeout TO '60000ms'")
+ expect_query("DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"foobar_ccnew\"")
+ expect_query("RESET idle_in_transaction_session_timeout; RESET lock_timeout")
+ expect_query("SET lock_timeout TO '60000ms'")
+ expect_query("DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"foobar_ccnew1\"")
+ expect_query("RESET idle_in_transaction_session_timeout; RESET lock_timeout")
- expect(subject).to eq(result)
+ subject
+ end
+
+ def expect_query(sql)
+ expect(ApplicationRecord.connection).to receive(:execute).ordered.with(sql).and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/schema_migrations/context_spec.rb b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
index f3bed9b40d6..1f1943d00a3 100644
--- a/spec/lib/gitlab/database/schema_migrations/context_spec.rb
+++ b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::SchemaMigrations::Context do
- let(:connection) { ActiveRecord::Base.connection }
+ let(:connection_class) { ActiveRecord::Base }
+ let(:connection) { connection_class.connection }
let(:context) { described_class.new(connection) }
@@ -12,13 +13,65 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do
expect(context.schema_directory).to eq(File.join(Rails.root, 'db/schema_migrations'))
end
- context 'multiple databases' do
- let(:connection) { Ci::BaseModel.connection }
+ context 'CI database' do
+ let(:connection_class) { Ci::CiDatabaseRecord }
it 'returns a directory path that is database specific' do
skip_if_multiple_databases_not_setup
- expect(context.schema_directory).to eq(File.join(Rails.root, 'db/ci_schema_migrations'))
+ expect(context.schema_directory).to eq(File.join(Rails.root, 'db/schema_migrations'))
+ end
+ end
+
+ context 'multiple databases' do
+ let(:connection_class) do
+ Class.new(::ApplicationRecord) do
+ self.abstract_class = true
+
+ def self.name
+ 'Gitlab::Database::SchemaMigrations::Context::TestConnection'
+ end
+ end
+ end
+
+ let(:configuration_overrides) { {} }
+
+ before do
+ connection_class.establish_connection(
+ ActiveRecord::Base
+ .connection_pool
+ .db_config
+ .configuration_hash
+ .merge(configuration_overrides)
+ )
+ end
+
+ after do
+ connection_class.remove_connection
+ end
+
+ context 'when `schema_migrations_path` is configured as string' do
+ let(:configuration_overrides) do
+ { "schema_migrations_path" => "db/ci_schema_migrations" }
+ end
+
+ it 'returns the configured directory path' do
+ skip_if_multiple_databases_not_setup
+
+ expect(context.schema_directory).to eq(File.join(Rails.root, 'db/ci_schema_migrations'))
+ end
+ end
+
+ context 'when `schema_migrations_path` is configured as symbol' do
+ let(:configuration_overrides) do
+ { schema_migrations_path: "db/ci_schema_migrations" }
+ end
+
+ it 'returns the configured directory path' do
+ skip_if_multiple_databases_not_setup
+
+ expect(context.schema_directory).to eq(File.join(Rails.root, 'db/ci_schema_migrations'))
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/transaction/context_spec.rb b/spec/lib/gitlab/database/transaction/context_spec.rb
new file mode 100644
index 00000000000..65d52b4d099
--- /dev/null
+++ b/spec/lib/gitlab/database/transaction/context_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Transaction::Context do
+ subject { described_class.new }
+
+ let(:data) { subject.context }
+
+ before do
+ stub_const("#{described_class}::LOG_THROTTLE", 100)
+ end
+
+ describe '#set_start_time' do
+ before do
+ subject.set_start_time
+ end
+
+ it 'sets start_time' do
+ expect(data).to have_key(:start_time)
+ end
+ end
+
+ describe '#increment_savepoints' do
+ before do
+ 2.times { subject.increment_savepoints }
+ end
+
+ it { expect(data[:savepoints]).to eq(2) }
+ end
+
+ describe '#increment_rollbacks' do
+ before do
+ 3.times { subject.increment_rollbacks }
+ end
+
+ it { expect(data[:rollbacks]).to eq(3) }
+ end
+
+ describe '#increment_releases' do
+ before do
+ 4.times { subject.increment_releases }
+ end
+
+ it { expect(data[:releases]).to eq(4) }
+ end
+
+ describe '#set_depth' do
+ before do
+ subject.set_depth(2)
+ end
+
+ it { expect(data[:depth]).to eq(2) }
+ end
+
+ describe '#track_sql' do
+ before do
+ subject.track_sql('SELECT 1')
+ subject.track_sql('SELECT * FROM users')
+ end
+
+ it { expect(data[:queries]).to eq(['SELECT 1', 'SELECT * FROM users']) }
+ end
+
+ describe '#duration' do
+ before do
+ subject.set_start_time
+ end
+
+ it { expect(subject.duration).to be >= 0 }
+ end
+
+ context 'when depth is low' do
+ it 'does not log data upon COMMIT' do
+ expect(subject).not_to receive(:application_info)
+
+ subject.commit
+ end
+
+ it 'does not log data upon ROLLBACK' do
+ expect(subject).not_to receive(:application_info)
+
+ subject.rollback
+ end
+
+ it '#should_log? returns false' do
+ expect(subject.should_log?).to be false
+ end
+ end
+
+ shared_examples 'logs transaction data' do
+ it 'logs once upon COMMIT' do
+ expect(subject).to receive(:application_info).and_call_original
+
+ 2.times { subject.commit }
+ end
+
+ it 'logs once upon ROLLBACK' do
+ expect(subject).to receive(:application_info).once
+
+ 2.times { subject.rollback }
+ end
+
+ it 'logs again when log throttle duration passes' do
+ expect(subject).to receive(:application_info).twice.and_call_original
+
+ 2.times { subject.commit }
+
+ data[:last_log_timestamp] -= (described_class::LOG_THROTTLE_DURATION + 1)
+
+ subject.commit
+ end
+
+ it '#should_log? returns true' do
+ expect(subject.should_log?).to be true
+ end
+ end
+
+ context 'when depth exceeds threshold' do
+ before do
+ subject.set_depth(described_class::LOG_DEPTH_THRESHOLD + 1)
+ end
+
+ it_behaves_like 'logs transaction data'
+ end
+
+ context 'when savepoints count exceeds threshold' do
+ before do
+ data[:savepoints] = described_class::LOG_SAVEPOINTS_THRESHOLD + 1
+ end
+
+ it_behaves_like 'logs transaction data'
+ end
+
+ context 'when duration exceeds threshold' do
+ before do
+ subject.set_start_time
+
+ data[:start_time] -= (described_class::LOG_DURATION_S_THRESHOLD + 1)
+ end
+
+ it_behaves_like 'logs transaction data'
+ end
+end
diff --git a/spec/lib/gitlab/database/transaction/observer_spec.rb b/spec/lib/gitlab/database/transaction/observer_spec.rb
new file mode 100644
index 00000000000..7aa24217dc3
--- /dev/null
+++ b/spec/lib/gitlab/database/transaction/observer_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Transaction::Observer do
+ # Use the delete DB strategy so that the test won't be wrapped in a transaction
+ describe '.instrument_transactions', :delete do
+ let(:transaction_context) { ActiveRecord::Base.connection.transaction_manager.transaction_context }
+ let(:context) { transaction_context.context }
+
+ around do |example|
+ # Emulate production environment when SQL comments come first to avoid truncation
+ Marginalia::Comment.prepend_comment = true
+ subscriber = described_class.register!
+
+ example.run
+
+ ActiveSupport::Notifications.unsubscribe(subscriber)
+ Marginalia::Comment.prepend_comment = false
+ end
+
+ it 'tracks transaction data', :aggregate_failures do
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.transaction(requires_new: true) do
+ User.first
+
+ expect(transaction_context).to be_a(::Gitlab::Database::Transaction::Context)
+ expect(context.keys).to match_array(%i(start_time depth savepoints queries))
+ expect(context[:depth]).to eq(2)
+ expect(context[:savepoints]).to eq(1)
+ expect(context[:queries].length).to eq(1)
+ end
+ end
+
+ expect(context[:depth]).to eq(2)
+ expect(context[:savepoints]).to eq(1)
+ expect(context[:releases]).to eq(1)
+ end
+
+ describe '.extract_sql_command' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:sql, :expected) do
+ 'SELECT 1' | 'SELECT 1'
+ '/* test comment */ SELECT 1' | 'SELECT 1'
+ '/* test comment */ ROLLBACK TO SAVEPOINT point1' | 'ROLLBACK TO SAVEPOINT '
+ 'SELECT 1 /* trailing comment */' | 'SELECT 1 /* trailing comment */'
+ end
+
+ with_them do
+ it do
+ expect(described_class.extract_sql_command(sql)).to eq(expected)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
index ff8e76311ae..0282a7af0df 100644
--- a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
@@ -37,22 +37,20 @@ RSpec.describe Gitlab::Database::WithLockRetriesOutsideTransaction do
context 'when lock retry is enabled' do
let(:lock_fiber) do
Fiber.new do
- configuration = ActiveRecordSecond.configurations.find_db_config(Rails.env).configuration_hash
+ # Initiating a separate DB connection for the lock
+ conn = ActiveRecord::Base.connection_pool.checkout
- # Initiating a second DB connection for the lock
- conn = ActiveRecordSecond.establish_connection(configuration).connection
conn.transaction do
conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
Fiber.yield
end
- ActiveRecordSecond.remove_connection # force disconnect
+ # Releasing the connection we requested
+ ActiveRecord::Base.connection_pool.checkin(conn)
end
end
before do
- stub_const('ActiveRecordSecond', Class.new(ActiveRecord::Base))
-
lock_fiber.resume # start the transaction and lock the table
end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 367f793b117..72074f06210 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -37,22 +37,19 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when lock retry is enabled' do
let(:lock_fiber) do
Fiber.new do
- configuration = ActiveRecordSecond.configurations.find_db_config(Rails.env).configuration_hash
-
- # Initiating a second DB connection for the lock
- conn = ActiveRecordSecond.establish_connection(configuration).connection
+ # Initiating a separate DB connection for the lock
+ conn = ActiveRecord::Base.connection_pool.checkout
conn.transaction do
conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
Fiber.yield
end
- ActiveRecordSecond.remove_connection # force disconnect
+ # Releasing the connection we requested
+ ActiveRecord::Base.connection_pool.checkin(conn)
end
end
before do
- stub_const('ActiveRecordSecond', Class.new(ActiveRecord::Base))
-
lock_fiber.resume # start the transaction and lock the table
end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index a834e41c019..c67b5af5e3c 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -15,32 +15,6 @@ RSpec.describe Gitlab::Database do
end
end
- describe '.default_pool_size' do
- before do
- allow(Gitlab::Runtime).to receive(:max_threads).and_return(7)
- end
-
- it 'returns the max thread size plus a fixed headroom of 10' do
- expect(described_class.default_pool_size).to eq(17)
- end
-
- it 'returns the max thread size plus a DB_POOL_HEADROOM if this env var is present' do
- stub_env('DB_POOL_HEADROOM', '7')
-
- expect(described_class.default_pool_size).to eq(14)
- end
- end
-
- describe '.config' do
- it 'returns a HashWithIndifferentAccess' do
- expect(described_class.config).to be_an_instance_of(HashWithIndifferentAccess)
- end
-
- it 'returns a default pool size' do
- expect(described_class.config).to include(pool: described_class.default_pool_size)
- end
- end
-
describe '.has_config?' do
context 'two tier database config' do
before do
@@ -114,108 +88,11 @@ RSpec.describe Gitlab::Database do
end
end
- describe '.adapter_name' do
- it 'returns the name of the adapter' do
- expect(described_class.adapter_name).to be_an_instance_of(String)
- end
-
- it 'returns Unknown when using anything else' do
- allow(described_class).to receive(:postgresql?).and_return(false)
-
- expect(described_class.human_adapter_name).to eq('Unknown')
- end
- end
-
- describe '.human_adapter_name' do
- it 'returns PostgreSQL when using PostgreSQL' do
- expect(described_class.human_adapter_name).to eq('PostgreSQL')
- end
- end
-
- describe '.system_id' do
- it 'returns the PostgreSQL system identifier' do
- expect(described_class.system_id).to be_an_instance_of(Integer)
- end
- end
-
- describe '.disable_prepared_statements' do
- around do |example|
- original_config = ::Gitlab::Database.config
-
- example.run
-
- ActiveRecord::Base.establish_connection(original_config)
- end
-
- it 'disables prepared statements' do
- ActiveRecord::Base.establish_connection(::Gitlab::Database.config.merge(prepared_statements: true))
- expect(ActiveRecord::Base.connection.prepared_statements).to eq(true)
-
- expect(ActiveRecord::Base).to receive(:establish_connection)
- .with(a_hash_including({ 'prepared_statements' => false })).and_call_original
-
- described_class.disable_prepared_statements
-
- expect(ActiveRecord::Base.connection.prepared_statements).to eq(false)
- end
- end
-
- describe '.postgresql?' do
- subject { described_class.postgresql? }
-
- it { is_expected.to satisfy { |val| val == true || val == false } }
- end
-
- describe '.version' do
- around do |example|
- described_class.instance_variable_set(:@version, nil)
- example.run
- described_class.instance_variable_set(:@version, nil)
- end
-
- context "on postgresql" do
- it "extracts the version number" do
- allow(described_class).to receive(:database_version)
- .and_return("PostgreSQL 9.4.4 on x86_64-apple-darwin14.3.0")
-
- expect(described_class.version).to eq '9.4.4'
- end
- end
-
- it 'memoizes the result' do
- count = ActiveRecord::QueryRecorder
- .new { 2.times { described_class.version } }
- .count
-
- expect(count).to eq(1)
- end
- end
-
- describe '.postgresql_minimum_supported_version?' do
- it 'returns false when using PostgreSQL 10' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.postgresql_minimum_supported_version?).to eq(false)
- end
-
- it 'returns false when using PostgreSQL 11' do
- allow(described_class).to receive(:version).and_return('11')
-
- expect(described_class.postgresql_minimum_supported_version?).to eq(false)
- end
-
- it 'returns true when using PostgreSQL 12' do
- allow(described_class).to receive(:version).and_return('12')
-
- expect(described_class.postgresql_minimum_supported_version?).to eq(true)
- end
- end
-
describe '.check_postgres_version_and_print_warning' do
subject { described_class.check_postgres_version_and_print_warning }
it 'prints a warning if not compliant with minimum postgres version' do
- allow(described_class).to receive(:postgresql_minimum_supported_version?).and_return(false)
+ allow(described_class.main).to receive(:postgresql_minimum_supported_version?).and_return(false)
expect(Kernel).to receive(:warn).with(/You are using PostgreSQL/)
@@ -223,7 +100,7 @@ RSpec.describe Gitlab::Database do
end
it 'doesnt print a warning if compliant with minimum postgres version' do
- allow(described_class).to receive(:postgresql_minimum_supported_version?).and_return(true)
+ allow(described_class.main).to receive(:postgresql_minimum_supported_version?).and_return(true)
expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/)
@@ -231,7 +108,7 @@ RSpec.describe Gitlab::Database do
end
it 'doesnt print a warning in Rails runner environment' do
- allow(described_class).to receive(:postgresql_minimum_supported_version?).and_return(false)
+ allow(described_class.main).to receive(:postgresql_minimum_supported_version?).and_return(false)
allow(Gitlab::Runtime).to receive(:rails_runner?).and_return(true)
expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/)
@@ -240,13 +117,13 @@ RSpec.describe Gitlab::Database do
end
it 'ignores ActiveRecord errors' do
- allow(described_class).to receive(:postgresql_minimum_supported_version?).and_raise(ActiveRecord::ActiveRecordError)
+ allow(described_class.main).to receive(:postgresql_minimum_supported_version?).and_raise(ActiveRecord::ActiveRecordError)
expect { subject }.not_to raise_error
end
it 'ignores Postgres errors' do
- allow(described_class).to receive(:postgresql_minimum_supported_version?).and_raise(PG::Error)
+ allow(described_class.main).to receive(:postgresql_minimum_supported_version?).and_raise(PG::Error)
expect { subject }.not_to raise_error
end
@@ -262,244 +139,19 @@ RSpec.describe Gitlab::Database do
it { expect(described_class.nulls_first_order('column', 'DESC')).to eq 'column DESC NULLS FIRST'}
end
- describe '.with_connection_pool' do
- it 'creates a new connection pool and disconnect it after used' do
- closed_pool = nil
-
- described_class.with_connection_pool(1) do |pool|
- pool.with_connection do |connection|
- connection.execute('SELECT 1 AS value')
- end
-
- expect(pool).to be_connected
-
- closed_pool = pool
- end
-
- expect(closed_pool).not_to be_connected
- end
-
- it 'disconnects the pool even an exception was raised' do
- error = Class.new(RuntimeError)
- closed_pool = nil
-
- begin
- described_class.with_connection_pool(1) do |pool|
- pool.with_connection do |connection|
- connection.execute('SELECT 1 AS value')
- end
-
- closed_pool = pool
-
- raise error, 'boom'
- end
- rescue error
- end
-
- expect(closed_pool).not_to be_connected
- end
- end
-
- describe '.bulk_insert' do
- before do
- allow(described_class).to receive(:connection).and_return(connection)
- allow(connection).to receive(:quote_column_name, &:itself)
- allow(connection).to receive(:quote, &:itself)
- allow(connection).to receive(:execute)
- end
-
- let(:connection) { double(:connection) }
-
- let(:rows) do
- [
- { a: 1, b: 2, c: 3 },
- { c: 6, a: 4, b: 5 }
- ]
- end
-
- it 'does nothing with empty rows' do
- expect(connection).not_to receive(:execute)
-
- described_class.bulk_insert('test', [])
- end
-
- it 'uses the ordering from the first row' do
- expect(connection).to receive(:execute) do |sql|
- expect(sql).to include('(1, 2, 3)')
- expect(sql).to include('(4, 5, 6)')
- end
-
- described_class.bulk_insert('test', rows)
- end
-
- it 'quotes column names' do
- expect(connection).to receive(:quote_column_name).with(:a)
- expect(connection).to receive(:quote_column_name).with(:b)
- expect(connection).to receive(:quote_column_name).with(:c)
-
- described_class.bulk_insert('test', rows)
- end
-
- it 'quotes values' do
- 1.upto(6) do |i|
- expect(connection).to receive(:quote).with(i)
- end
-
- described_class.bulk_insert('test', rows)
- end
-
- it 'does not quote values of a column in the disable_quote option' do
- [1, 2, 4, 5].each do |i|
- expect(connection).to receive(:quote).with(i)
- end
-
- described_class.bulk_insert('test', rows, disable_quote: :c)
- end
-
- it 'does not quote values of columns in the disable_quote option' do
- [2, 5].each do |i|
- expect(connection).to receive(:quote).with(i)
- end
-
- described_class.bulk_insert('test', rows, disable_quote: [:a, :c])
- end
-
- it 'handles non-UTF-8 data' do
- expect { described_class.bulk_insert('test', [{ a: "\255" }]) }.not_to raise_error
- end
-
- context 'when using PostgreSQL' do
- it 'allows the returning of the IDs of the inserted rows' do
- result = double(:result, values: [['10']])
-
- expect(connection)
- .to receive(:execute)
- .with(/RETURNING id/)
- .and_return(result)
-
- ids = described_class
- .bulk_insert('test', [{ number: 10 }], return_ids: true)
-
- expect(ids).to eq([10])
- end
-
- it 'allows setting the upsert to do nothing' do
- expect(connection)
- .to receive(:execute)
- .with(/ON CONFLICT DO NOTHING/)
-
- described_class
- .bulk_insert('test', [{ number: 10 }], on_conflict: :do_nothing)
- end
- end
- end
-
- describe '.create_connection_pool' do
- it 'creates a new connection pool with specific pool size' do
- pool = described_class.create_connection_pool(5)
-
- begin
- expect(pool)
- .to be_kind_of(ActiveRecord::ConnectionAdapters::ConnectionPool)
-
- expect(pool.db_config.pool).to eq(5)
- ensure
- pool.disconnect!
- end
- end
-
- it 'allows setting of a custom hostname' do
- pool = described_class.create_connection_pool(5, '127.0.0.1')
-
- begin
- expect(pool.db_config.host).to eq('127.0.0.1')
- ensure
- pool.disconnect!
- end
- end
-
- it 'allows setting of a custom hostname and port' do
- pool = described_class.create_connection_pool(5, '127.0.0.1', 5432)
-
- begin
- expect(pool.db_config.host).to eq('127.0.0.1')
- expect(pool.db_config.configuration_hash[:port]).to eq(5432)
- ensure
- pool.disconnect!
- end
- end
- end
-
- describe '.cached_column_exists?' do
- it 'only retrieves data once' do
- expect(ActiveRecord::Base.connection).to receive(:columns).once.and_call_original
-
- 2.times do
- expect(described_class.cached_column_exists?(:projects, :id)).to be_truthy
- expect(described_class.cached_column_exists?(:projects, :bogus_column)).to be_falsey
- end
- end
- end
-
- describe '.cached_table_exists?' do
- it 'only retrieves data once per table' do
- expect(ActiveRecord::Base.connection).to receive(:data_source_exists?).with(:projects).once.and_call_original
- expect(ActiveRecord::Base.connection).to receive(:data_source_exists?).with(:bogus_table_name).once.and_call_original
-
- 2.times do
- expect(described_class.cached_table_exists?(:projects)).to be_truthy
- expect(described_class.cached_table_exists?(:bogus_table_name)).to be_falsey
- end
- end
-
- it 'returns false when database does not exist' do
- expect(ActiveRecord::Base).to receive(:connection) { raise ActiveRecord::NoDatabaseError, 'broken' }
-
- expect(described_class.cached_table_exists?(:projects)).to be(false)
- end
- end
-
- describe '.exists?' do
- it 'returns true if `ActiveRecord::Base.connection` succeeds' do
- expect(ActiveRecord::Base).to receive(:connection)
-
- expect(described_class.exists?).to be(true)
- end
-
- it 'returns false if `ActiveRecord::Base.connection` fails' do
- expect(ActiveRecord::Base).to receive(:connection) { raise ActiveRecord::NoDatabaseError, 'broken' }
-
- expect(described_class.exists?).to be(false)
- end
- end
-
- describe '.get_write_location' do
- it 'returns a string' do
+ describe '.db_config_name' do
+ it 'returns the db_config name for the connection' do
connection = ActiveRecord::Base.connection
- expect(described_class.get_write_location(connection)).to be_a(String)
- end
-
- it 'returns nil if there are no results' do
- connection = double(select_all: [])
-
- expect(described_class.get_write_location(connection)).to be_nil
- end
- end
-
- describe '.dbname' do
- it 'returns the dbname for the connection' do
- connection = ActiveRecord::Base.connection
-
- expect(described_class.dbname(connection)).to be_a(String)
- expect(described_class.dbname(connection)).to eq(connection.pool.db_config.database)
+ expect(described_class.db_config_name(connection)).to be_a(String)
+ expect(described_class.db_config_name(connection)).to eq(connection.pool.db_config.name)
end
context 'when the pool is a NullPool' do
it 'returns unknown' do
connection = double(:active_record_connection, pool: ActiveRecord::ConnectionAdapters::NullPool.new)
- expect(described_class.dbname(connection)).to eq('unknown')
+ expect(described_class.db_config_name(connection)).to eq('unknown')
end
end
end
@@ -516,42 +168,6 @@ RSpec.describe Gitlab::Database do
end
end
- describe '.read_only?' do
- it 'returns false' do
- expect(described_class.read_only?).to be_falsey
- end
- end
-
- describe '.db_read_only?' do
- before do
- allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
- end
-
- it 'detects a read-only database' do
- allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "t" }])
-
- expect(described_class.db_read_only?).to be_truthy
- end
-
- it 'detects a read-only database' do
- allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => true }])
-
- expect(described_class.db_read_only?).to be_truthy
- end
-
- it 'detects a read-write database' do
- allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "f" }])
-
- expect(described_class.db_read_only?).to be_falsey
- end
-
- it 'detects a read-write database' do
- allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => false }])
-
- expect(described_class.db_read_only?).to be_falsey
- end
- end
-
describe '#sanitize_timestamp' do
let(:max_timestamp) { Time.at((1 << 31) - 1) }
@@ -574,6 +190,18 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.read_only?' do
+ it 'returns false' do
+ expect(described_class.read_only?).to eq(false)
+ end
+ end
+
+ describe '.read_write?' do
+ it 'returns true' do
+ expect(described_class.read_write?).to eq(true)
+ end
+ end
+
describe 'ActiveRecordBaseTransactionMetrics' do
def subscribe_events
events = []
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index e76a5d3fe32..c0ac40e3249 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -110,6 +110,60 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
end
+ context 'when email contains reply' do
+ shared_examples 'no content message' do
+ context 'when email contains quoted text only' do
+ let(:email_raw) { fixture_file('emails/no_content_with_quote.eml') }
+
+ it 'raises an EmptyEmailError' do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::EmptyEmailError)
+ end
+ end
+
+ context 'when email contains quoted text and quick commands only' do
+ let(:email_raw) { fixture_file('emails/commands_only_reply.eml') }
+
+ it 'does not create a discussion' do
+ expect { receiver.execute }.not_to change { noteable.notes.count }
+ end
+ end
+ end
+
+ context 'when noteable is not an issue' do
+ let_it_be(:note) { create(:note_on_merge_request, project: project) }
+
+ it_behaves_like 'no content message'
+
+ context 'when email contains text, quoted text and quick commands' do
+ let(:email_raw) { fixture_file('emails/commands_in_reply.eml') }
+
+ it 'creates a discussion without appended reply' do
+ expect { receiver.execute }.to change { noteable.notes.count }.by(1)
+ new_note = noteable.notes.last
+
+ expect(new_note.note).not_to include('<details><summary>...</summary>')
+ end
+ end
+ end
+
+ context 'when noteable is an issue' do
+ let_it_be(:note) { create(:note_on_issue, project: project) }
+
+ it_behaves_like 'no content message'
+
+ context 'when email contains text, quoted text and quick commands' do
+ let(:email_raw) { fixture_file('emails/commands_in_reply.eml') }
+
+ it 'creates a discussion with appended reply' do
+ expect { receiver.execute }.to change { noteable.notes.count }.by(1)
+ new_note = noteable.notes.last
+
+ expect(new_note.note).to include('<details><summary>...</summary>')
+ end
+ end
+ end
+ end
+
context 'when note is not a discussion' do
let(:note) { create(:note_on_merge_request, project: project) }
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb
new file mode 100644
index 00000000000..b5c3415fe12
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::AdminVerify do
+ let_it_be(:group) { build(:group) }
+ let_it_be(:user) { build(:user) }
+
+ let(:series) { 0 }
+
+ subject(:message) { described_class.new(group: group, user: user, series: series)}
+
+ describe 'public methods' do
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to eq 'Create a custom CI runner with just a few clicks'
+ expect(message.tagline).to be_nil
+ expect(message.title).to eq 'Spin up an autoscaling runner in GitLab'
+ expect(message.subtitle).to eq 'Use our AWS cloudformation template to spin up your runners in just a few clicks!'
+ expect(message.body_line1).to be_empty
+ expect(message.body_line2).to be_empty
+ expect(message.cta_text).to eq 'Create a custom runner'
+ expect(message.logo_path).to eq 'mailers/in_product_marketing/admin_verify-0.png'
+ end
+
+ describe '#progress' do
+ subject { message.progress }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it { is_expected.to eq('This is email 1 of 1 in the Admin series.') }
+ end
+
+ context 'not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it { is_expected.to include('This is email 1 of 1 in the Admin series', Gitlab::Routing.url_helpers.profile_notifications_url) }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb
new file mode 100644
index 00000000000..daeacef53f6
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::TeamShort do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { build(:group) }
+ let_it_be(:user) { build(:user) }
+
+ let(:series) { 0 }
+
+ subject(:message) { described_class.new(group: group, user: user, series: series)}
+
+ describe 'public methods' do
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to eq 'Team up in GitLab for greater efficiency'
+ expect(message.tagline).to be_nil
+ expect(message.title).to eq 'Turn coworkers into collaborators'
+ expect(message.subtitle).to eq 'Invite your team today to build better code (and processes) together'
+ expect(message.body_line1).to be_empty
+ expect(message.body_line2).to be_empty
+ expect(message.cta_text).to eq 'Invite your colleagues today'
+ expect(message.logo_path).to eq 'mailers/in_product_marketing/team-0.png'
+ end
+
+ describe '#progress' do
+ subject { message.progress }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it { is_expected.to include('This is email 1 of 4 in the Team series') }
+ end
+
+ context 'not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it { is_expected.to include('This is email 1 of 4 in the Team series', Gitlab::Routing.url_helpers.profile_notifications_url) }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
index f72994fcce1..eca8ba1df00 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
@@ -23,6 +23,26 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Team do
expect(message.body_line2).to be_present
expect(message.cta_text).to be_present
end
+
+ describe '#progress' do
+ subject { message.progress }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it { is_expected.to include("This is email #{series + 2} of 4 in the Team series") }
+ end
+
+ context 'not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it { is_expected.to include("This is email #{series + 2} of 4 in the Team series", Gitlab::Routing.url_helpers.profile_notifications_url) }
+ end
+ end
end
context 'with series 2' do
@@ -37,6 +57,26 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Team do
expect(message.body_line2).to be_present
expect(message.cta_text).to be_present
end
+
+ describe '#progress' do
+ subject { message.progress }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it { is_expected.to include('This is email 4 of 4 in the Team series') }
+ end
+
+ context 'not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it { is_expected.to include('This is email 4 of 4 in the Team series', Gitlab::Routing.url_helpers.profile_notifications_url) }
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb
new file mode 100644
index 00000000000..ebad4672eb3
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::TrialShort do
+ let_it_be(:group) { build(:group) }
+ let_it_be(:user) { build(:user) }
+
+ let(:series) { 0 }
+
+ subject(:message) { described_class.new(group: group, user: user, series: series)}
+
+ describe 'public methods' do
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to eq 'Be a DevOps hero'
+ expect(message.tagline).to be_nil
+ expect(message.title).to eq 'Expand your DevOps journey with a free GitLab trial'
+ expect(message.subtitle).to eq 'Start your trial today to experience single application success and discover all the features of GitLab Ultimate for free!'
+ expect(message.body_line1).to be_empty
+ expect(message.body_line2).to be_empty
+ expect(message.cta_text).to eq 'Start a trial'
+ expect(message.logo_path).to eq 'mailers/in_product_marketing/trial-0.png'
+ end
+
+ describe '#progress' do
+ subject { message.progress }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it { is_expected.to eq('This is email 1 of 4 in the Trial series.') }
+ end
+
+ context 'not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it { is_expected.to include('This is email 1 of 4 in the Trial series', Gitlab::Routing.url_helpers.profile_notifications_url) }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
index 5f7639a9ed6..3e18b8e35b6 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
@@ -23,6 +23,26 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Trial do
expect(message.body_line2).to be_present
expect(message.cta_text).to be_present
end
+
+ describe '#progress' do
+ subject { message.progress }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it { is_expected.to eq("This is email #{series + 2} of 4 in the Trial series.") }
+ end
+
+ context 'not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it { is_expected.to include("This is email #{series + 2} of 4 in the Trial series", Gitlab::Routing.url_helpers.profile_notifications_url) }
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/email/reply_parser_spec.rb b/spec/lib/gitlab/email/reply_parser_spec.rb
index bc4c6cf007d..3b01b568fb4 100644
--- a/spec/lib/gitlab/email/reply_parser_spec.rb
+++ b/spec/lib/gitlab/email/reply_parser_spec.rb
@@ -228,5 +228,21 @@ RSpec.describe Gitlab::Email::ReplyParser do
BODY
)
end
+
+    it "appends trimmed reply when append_reply option is true" do
+ body = <<-BODY.strip_heredoc.chomp
+ The reply by email functionality should be extended to allow creating a new issue by email.
+ even when the email is forwarded to the project which may include lines that begin with ">"
+
+ there should be a quote below this line:
+ BODY
+
+ reply = <<-BODY.strip_heredoc.chomp
+ > this is a quote
+ BODY
+
+ expect(test_parse_body(fixture_file("emails/valid_new_issue_with_quote.eml"), { append_reply: true }))
+ .to contain_exactly(body, reply)
+ end
end
end
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index cf0d1577314..268ac5dcc21 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -3,6 +3,8 @@
require "spec_helper"
RSpec.describe Gitlab::EncodingHelper do
+ using RSpec::Parameterized::TableSyntax
+
let(:ext_class) { Class.new { extend Gitlab::EncodingHelper } }
let(:binary_string) { File.read(Rails.root + "spec/fixtures/dk.png") }
@@ -90,6 +92,22 @@ RSpec.describe Gitlab::EncodingHelper do
end
end
+ describe '#encode_utf8_no_detect' do
+ where(:input, :expected) do
+ "abcd" | "abcd"
+ "DzDzDz" | "DzDzDz"
+ "\xC7\xB2\xC7DzDzDz" | "Dz�DzDzDz"
+ "🐤🐤🐤🐤\xF0\x9F\x90" | "🐤🐤🐤🐤�"
+ end
+
+ with_them do
+ it 'drops invalid UTF-8' do
+ expect(ext_class.encode_utf8_no_detect(input.dup.force_encoding(Encoding::ASCII_8BIT))).to eq(expected)
+ expect(ext_class.encode_utf8_no_detect(input)).to eq(expected)
+ end
+ end
+ end
+
describe '#encode_utf8' do
[
["nil", nil, nil],
diff --git a/spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb b/spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb
index 5c496d653b2..577d59798da 100644
--- a/spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb
+++ b/spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::ErrorTracking::StackTraceHighlightDecorator do
- let(:error_event) { build(:error_tracking_error_event) }
+ let(:error_event) { build(:error_tracking_sentry_error_event) }
describe '.decorate' do
subject(:decorate) { described_class.decorate(error_event) }
diff --git a/spec/lib/gitlab/etag_caching/router/restful_spec.rb b/spec/lib/gitlab/etag_caching/router/restful_spec.rb
index 877789b320f..1f5cac09b6d 100644
--- a/spec/lib/gitlab/etag_caching/router/restful_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router/restful_spec.rb
@@ -87,12 +87,18 @@ RSpec.describe Gitlab::EtagCaching::Router::Restful do
end
it 'matches the environments path' do
- result = match_route('/my-group/my-project/environments.json')
+ result = match_route('/my-group/my-project/-/environments.json')
expect(result).to be_present
expect(result.name).to eq 'environments'
end
+ it 'does not match the operations environments list path' do
+ result = match_route('/-/operations/environments.json')
+
+ expect(result).not_to be_present
+ end
+
it 'matches pipeline#show endpoint' do
result = match_route('/my-group/my-project/-/pipelines/2.json')
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index 7a619c9f155..8535d72a61f 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
+ include TrackingHelpers
+
before do
stub_const('Gitlab::Experimentation::EXPERIMENTS', {
backwards_compatible_test_experiment: {
@@ -43,7 +45,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
let(:cookie_value) { nil }
before do
- request.headers['DNT'] = do_not_track if do_not_track.present?
+ stub_do_not_track(do_not_track) if do_not_track.present?
request.cookies[:experimentation_subject_id] = cookie_value if cookie_value
get :index
@@ -242,7 +244,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
context 'do not track is disabled' do
before do
- request.headers['DNT'] = '0'
+ stub_do_not_track('0')
end
it 'does track the event' do
@@ -260,7 +262,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
context 'do not track enabled' do
before do
- request.headers['DNT'] = '1'
+ stub_do_not_track('1')
end
it 'does not track the event' do
@@ -396,7 +398,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
context 'do not track disabled' do
before do
- request.headers['DNT'] = '0'
+ stub_do_not_track('0')
end
it 'pushes the right parameters to gon' do
@@ -414,7 +416,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
context 'do not track enabled' do
before do
- request.headers['DNT'] = '1'
+ stub_do_not_track('1')
end
it 'does not push data to gon' do
@@ -525,7 +527,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
context 'is disabled' do
before do
- request.headers['DNT'] = '0'
+ stub_do_not_track('0')
stub_experiment_for_subject(test_experiment: false)
end
@@ -538,7 +540,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
context 'is enabled' do
before do
- request.headers['DNT'] = '1'
+ stub_do_not_track('1')
end
it 'does not call add_user on the Experiment model' do
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 10bfa9e8d0e..c486538a260 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -2,22 +2,6 @@
require 'spec_helper'
-# As each associated, backwards-compatible experiment gets cleaned up and removed from the EXPERIMENTS list, its key will also get removed from this list. Once the list here is empty, we can remove the backwards compatibility code altogether.
-# Originally created as part of https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45733 for https://gitlab.com/gitlab-org/gitlab/-/issues/270858.
-RSpec.describe Gitlab::Experimentation::EXPERIMENTS do
- it 'temporarily ensures we know what experiments exist for backwards compatibility' do
- expected_experiment_keys = [
- :invite_members_empty_group_version_a,
- :contact_sales_btn_in_app
- ]
-
- backwards_compatible_experiment_keys = described_class.filter { |_, v| v[:use_backwards_compatible_subject_index] }.keys
-
- expect(backwards_compatible_experiment_keys).not_to be_empty, "Oh, hey! Let's clean up that :use_backwards_compatible_subject_index stuff now :D"
- expect(backwards_compatible_experiment_keys).to match(expected_experiment_keys)
- end
-end
-
RSpec.describe Gitlab::Experimentation do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/fake_application_settings_spec.rb b/spec/lib/gitlab/fake_application_settings_spec.rb
index ec32afcfb7b..b300498e898 100644
--- a/spec/lib/gitlab/fake_application_settings_spec.rb
+++ b/spec/lib/gitlab/fake_application_settings_spec.rb
@@ -6,27 +6,35 @@ RSpec.describe Gitlab::FakeApplicationSettings do
let(:defaults) do
described_class.defaults.merge(
foobar: 'asdf',
- 'test?' => 123
+ 'test?'.to_sym => 123,
+ # these two settings have no default in ApplicationSettingImplementation,
+ # so we need to set one here
+ domain_denylist: [],
+ archive_builds_in_seconds: nil
)
end
let(:setting) { described_class.new(defaults) }
- it 'wraps OpenStruct variables properly' do
+ it 'defines methods for default attributes' do
expect(setting.password_authentication_enabled_for_web).to be_truthy
expect(setting.signup_enabled).to be_truthy
expect(setting.foobar).to eq('asdf')
end
- it 'defines predicate methods' do
+ it 'defines predicate methods for boolean properties' do
expect(setting.password_authentication_enabled_for_web?).to be_truthy
expect(setting.signup_enabled?).to be_truthy
end
- it 'does not define a predicate method' do
+ it 'does not define a predicate method for non-boolean properties' do
expect(setting.foobar?).to be_nil
end
+ it 'returns nil for undefined attributes' do
+ expect(setting.does_not_exist).to be_nil
+ end
+
it 'does not override an existing predicate method' do
expect(setting.test?).to eq(123)
end
diff --git a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
new file mode 100644
index 00000000000..a46846e9820
--- /dev/null
+++ b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
+ let_it_be(:user) { build(:user) }
+ let_it_be(:fake_template) do
+ Object.new.tap do |template|
+ template.extend ActionView::Helpers::FormHelper
+ template.extend ActionView::Helpers::FormOptionsHelper
+ template.extend ActionView::Helpers::TagHelper
+ template.extend ActionView::Context
+ end
+ end
+
+ let_it_be(:form_builder) { described_class.new(:user, user, fake_template, {}) }
+
+ describe '#gitlab_ui_checkbox_component' do
+ let(:optional_args) { {} }
+
+ subject(:checkbox_html) { form_builder.gitlab_ui_checkbox_component(:view_diffs_file_by_file, "Show one file at a time on merge request's Changes tab", **optional_args) }
+
+ context 'without optional arguments' do
+ it 'renders correct html' do
+ expected_html = <<~EOS
+ <div class="gl-form-checkbox custom-control custom-checkbox">
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="0" />
+ <input class="custom-control-input" type="checkbox" value="1" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
+ <label class="custom-control-label" for="user_view_diffs_file_by_file">
+ Show one file at a time on merge request&#39;s Changes tab
+ </label>
+ </div>
+ EOS
+
+ expect(checkbox_html).to eq(html_strip_whitespace(expected_html))
+ end
+ end
+
+ context 'with optional arguments' do
+ let(:optional_args) do
+ {
+ help_text: 'Instead of all the files changed, show only one file at a time.',
+ checkbox_options: { class: 'checkbox-foo-bar' },
+ label_options: { class: 'label-foo-bar' },
+ checked_value: '3',
+ unchecked_value: '1'
+ }
+ end
+
+ it 'renders help text' do
+ expected_html = <<~EOS
+ <div class="gl-form-checkbox custom-control custom-checkbox">
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="1" />
+ <input class="custom-control-input checkbox-foo-bar" type="checkbox" value="3" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
+ <label class="custom-control-label label-foo-bar" for="user_view_diffs_file_by_file">
+ <span>Show one file at a time on merge request&#39;s Changes tab</span>
+ <p class="help-text">Instead of all the files changed, show only one file at a time.</p>
+ </label>
+ </div>
+ EOS
+
+ expect(checkbox_html).to eq(html_strip_whitespace(expected_html))
+ end
+
+ it 'passes arguments to `check_box` method' do
+ allow(fake_template).to receive(:check_box).and_return('')
+
+ checkbox_html
+
+ expect(fake_template).to have_received(:check_box).with(:user, :view_diffs_file_by_file, { class: %w(custom-control-input checkbox-foo-bar), object: user }, '3', '1')
+ end
+
+ it 'passes arguments to `label` method' do
+ allow(fake_template).to receive(:label).and_return('')
+
+ checkbox_html
+
+ expect(fake_template).to have_received(:label).with(:user, :view_diffs_file_by_file, { class: %w(custom-control-label label-foo-bar), object: user })
+ end
+ end
+ end
+
+ private
+
+ def html_strip_whitespace(html)
+ html.lines.map(&:strip).join('')
+ end
+end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 49f1e6e994f..f58bab52cfa 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -369,11 +369,15 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
commits.map { |c| c.id }
end
- it 'has 1 element' do
- expect(subject.size).to eq(1)
+ it { is_expected.to contain_exactly(SeedRepo::Commit::ID) }
+
+ context 'between_uses_list_commits FF disabled' do
+ before do
+ stub_feature_flags(between_uses_list_commits: false)
+ end
+
+ it { is_expected.to contain_exactly(SeedRepo::Commit::ID) }
end
- it { is_expected.to include(SeedRepo::Commit::ID) }
- it { is_expected.not_to include(SeedRepo::FirstCommit::ID) }
end
describe '.shas_with_signatures' do
diff --git a/spec/lib/gitlab/git/commit_stats_spec.rb b/spec/lib/gitlab/git/commit_stats_spec.rb
new file mode 100644
index 00000000000..29d3909efec
--- /dev/null
+++ b/spec/lib/gitlab/git/commit_stats_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Git::CommitStats, :seed_helper do
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
+ let(:commit) { Gitlab::Git::Commit.find(repository, SeedRepo::Commit::ID) }
+
+ def verify_stats!
+ stats = described_class.new(repository, commit)
+
+ expect(stats).to have_attributes(
+ additions: eq(11),
+ deletions: eq(6),
+ total: eq(17)
+ )
+ end
+
+ it 'returns commit stats and caches them', :use_clean_rails_redis_caching do
+ expect(repository.gitaly_commit_client).to receive(:commit_stats).with(commit.id).and_call_original
+
+ verify_stats!
+
+ expect(Rails.cache.fetch("commit_stats:group/project:#{commit.id}")).to eq([11, 6])
+
+ expect(repository.gitaly_commit_client).not_to receive(:commit_stats)
+
+ verify_stats!
+ end
+end
diff --git a/spec/lib/gitlab/git/conflict/file_spec.rb b/spec/lib/gitlab/git/conflict/file_spec.rb
index 454a48a1d3a..6eb7a7e394e 100644
--- a/spec/lib/gitlab/git/conflict/file_spec.rb
+++ b/spec/lib/gitlab/git/conflict/file_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Git::Conflict::File do
- let(:conflict) { { theirs: { path: 'foo', mode: 33188 }, ours: { path: 'foo', mode: 33188 } } }
+ let(:conflict) { { ancestor: { path: 'ancestor' }, theirs: { path: 'foo', mode: 33188 }, ours: { path: 'foo', mode: 33188 } } }
let(:invalid_content) { described_class.new(nil, nil, conflict, (+"a\xC4\xFC").force_encoding(Encoding::ASCII_8BIT)) }
let(:valid_content) { described_class.new(nil, nil, conflict, (+"Espa\xC3\xB1a").force_encoding(Encoding::ASCII_8BIT)) }
@@ -48,4 +48,18 @@ RSpec.describe Gitlab::Git::Conflict::File do
end
end
end
+
+ describe '#path' do
+ it 'returns our_path' do
+ expect(valid_content.path).to eq(conflict[:ours][:path])
+ end
+
+ context 'when our_path is not present' do
+ let(:conflict) { { ancestor: { path: 'ancestor' }, theirs: { path: 'theirs', mode: 33188 }, ours: { path: '', mode: 0 } } }
+
+ it 'returns their_path' do
+ expect(valid_content.path).to eq(conflict[:theirs][:path])
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/remote_mirror_spec.rb b/spec/lib/gitlab/git/remote_mirror_spec.rb
index 0954879f6bd..4b827e5d2d0 100644
--- a/spec/lib/gitlab/git/remote_mirror_spec.rb
+++ b/spec/lib/gitlab/git/remote_mirror_spec.rb
@@ -6,30 +6,17 @@ RSpec.describe Gitlab::Git::RemoteMirror do
describe '#update' do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
- let(:ref_name) { 'foo' }
let(:url) { 'https://example.com' }
let(:options) { { only_branches_matching: ['master'], ssh_key: 'KEY', known_hosts: 'KNOWN HOSTS', keep_divergent_refs: true } }
- subject(:remote_mirror) { described_class.new(repository, ref_name, url, **options) }
+ subject(:remote_mirror) { described_class.new(repository, url, **options) }
- shared_examples 'an update' do
- it 'delegates to the Gitaly client' do
- expect(repository.gitaly_remote_client)
- .to receive(:update_remote_mirror)
- .with(ref_name, url, ['master'], ssh_key: 'KEY', known_hosts: 'KNOWN HOSTS', keep_divergent_refs: true)
-
- remote_mirror.update # rubocop:disable Rails/SaveBang
- end
- end
-
- context 'with url' do
- it_behaves_like 'an update'
- end
-
- context 'without url' do
- let(:url) { nil }
+ it 'delegates to the Gitaly client' do
+ expect(repository.gitaly_remote_client)
+ .to receive(:update_remote_mirror)
+ .with(url, ['master'], ssh_key: 'KEY', known_hosts: 'KNOWN HOSTS', keep_divergent_refs: true)
- it_behaves_like 'an update'
+ remote_mirror.update # rubocop:disable Rails/SaveBang
end
it 'wraps gitaly errors' do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 706bcdea291..29e7a1dce1d 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -491,6 +491,8 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
describe '#fetch_remote' do
+ let(:url) { 'http://example.clom' }
+
it 'delegates to the gitaly RepositoryService' do
ssh_auth = double(:ssh_auth)
expected_opts = {
@@ -500,17 +502,17 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
timeout: described_class::GITLAB_PROJECTS_TIMEOUT,
prune: false,
check_tags_changed: false,
- url: nil,
- refmap: nil
+ refmap: nil,
+ http_authorization_header: ""
}
- expect(repository.gitaly_repository_client).to receive(:fetch_remote).with('remote-name', expected_opts)
+ expect(repository.gitaly_repository_client).to receive(:fetch_remote).with(url, expected_opts)
- repository.fetch_remote('remote-name', ssh_auth: ssh_auth, forced: true, no_tags: true, prune: false, check_tags_changed: false)
+ repository.fetch_remote(url, ssh_auth: ssh_auth, forced: true, no_tags: true, prune: false, check_tags_changed: false)
end
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RepositoryService, :fetch_remote do
- subject { repository.fetch_remote('remote-name') }
+ subject { repository.fetch_remote(url) }
end
end
@@ -584,29 +586,29 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
.to receive(:find_remote_root_ref).and_call_original
- expect(repository.find_remote_root_ref('origin', SeedHelper::GITLAB_GIT_TEST_REPO_URL)).to eq 'master'
+ expect(repository.find_remote_root_ref(SeedHelper::GITLAB_GIT_TEST_REPO_URL)).to eq 'master'
end
it 'returns UTF-8' do
- expect(repository.find_remote_root_ref('origin', SeedHelper::GITLAB_GIT_TEST_REPO_URL)).to be_utf8
+ expect(repository.find_remote_root_ref(SeedHelper::GITLAB_GIT_TEST_REPO_URL)).to be_utf8
end
it 'returns nil when remote name is nil' do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
.not_to receive(:find_remote_root_ref)
- expect(repository.find_remote_root_ref(nil, nil)).to be_nil
+ expect(repository.find_remote_root_ref(nil)).to be_nil
end
it 'returns nil when remote name is empty' do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
.not_to receive(:find_remote_root_ref)
- expect(repository.find_remote_root_ref('', '')).to be_nil
+ expect(repository.find_remote_root_ref('')).to be_nil
end
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RemoteService, :find_remote_root_ref do
- subject { repository.find_remote_root_ref('origin', SeedHelper::GITLAB_GIT_TEST_REPO_URL) }
+ subject { repository.find_remote_root_ref(SeedHelper::GITLAB_GIT_TEST_REPO_URL) }
end
end
@@ -950,44 +952,23 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
let(:expected_commits) { 1 }
let(:revisions) { [new_commit] }
- shared_examples 'an enumeration of new commits' do
- it 'enumerates commits' do
- commits = repository.new_commits(revisions).to_a
-
- expect(commits.size).to eq(expected_commits)
- commits.each do |commit|
- expect(commit.id).to eq(new_commit)
- expect(commit.message).to eq("Message")
- end
+ before do
+ expect_next_instance_of(Gitlab::GitalyClient::CommitService) do |service|
+ expect(service)
+ .to receive(:list_commits)
+ .with([new_commit, '--not', '--all'])
+ .and_call_original
end
end
- context 'with list_commits disabled' do
- before do
- stub_feature_flags(list_commits: false)
+ it 'enumerates commits' do
+ commits = repository.new_commits(revisions).to_a
- expect_next_instance_of(Gitlab::GitalyClient::RefService) do |service|
- expect(service)
- .to receive(:list_new_commits)
- .with(new_commit)
- .and_call_original
- end
+ expect(commits.size).to eq(expected_commits)
+ commits.each do |commit|
+ expect(commit.id).to eq(new_commit)
+ expect(commit.message).to eq("Message")
end
-
- it_behaves_like 'an enumeration of new commits'
- end
-
- context 'with list_commits enabled' do
- before do
- expect_next_instance_of(Gitlab::GitalyClient::CommitService) do |service|
- expect(service)
- .to receive(:list_commits)
- .with([new_commit, '--not', '--all'])
- .and_call_original
- end
- end
-
- it_behaves_like 'an enumeration of new commits'
end
end
@@ -1750,43 +1731,61 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
end
- describe '#write_config' do
- before do
- repository_rugged.config["gitlab.fullpath"] = repository_path
- end
+ describe '#set_full_path' do
+ shared_examples '#set_full_path' do
+ before do
+ repository_rugged.config["gitlab.fullpath"] = repository_path
+ end
- context 'is given a path' do
- it 'writes it to disk' do
- repository.write_config(full_path: "not-the/real-path.git")
+ context 'is given a path' do
+ it 'writes it to disk' do
+ repository.set_full_path(full_path: "not-the/real-path.git")
- config = File.read(File.join(repository_path, "config"))
+ config = File.read(File.join(repository_path, "config"))
- expect(config).to include("[gitlab]")
- expect(config).to include("fullpath = not-the/real-path.git")
+ expect(config).to include("[gitlab]")
+ expect(config).to include("fullpath = not-the/real-path.git")
+ end
end
- end
- context 'it is given an empty path' do
- it 'does not write it to disk' do
- repository.write_config(full_path: "")
+ context 'it is given an empty path' do
+ it 'does not write it to disk' do
+ repository.set_full_path(full_path: "")
- config = File.read(File.join(repository_path, "config"))
+ config = File.read(File.join(repository_path, "config"))
- expect(config).to include("[gitlab]")
- expect(config).to include("fullpath = #{repository_path}")
+ expect(config).to include("[gitlab]")
+ expect(config).to include("fullpath = #{repository_path}")
+ end
+ end
+
+ context 'repository does not exist' do
+ it 'raises NoRepository and does not call Gitaly WriteConfig' do
+ repository = Gitlab::Git::Repository.new('default', 'does/not/exist.git', '', 'group/project')
+
+ expect(repository.gitaly_repository_client).not_to receive(:set_full_path)
+
+ expect do
+ repository.set_full_path(full_path: 'foo/bar.git')
+ end.to raise_error(Gitlab::Git::Repository::NoRepository)
+ end
end
end
- context 'repository does not exist' do
- it 'raises NoRepository and does not call Gitaly WriteConfig' do
- repository = Gitlab::Git::Repository.new('default', 'does/not/exist.git', '', 'group/project')
+ context 'with :set_full_path enabled' do
+ before do
+ stub_feature_flags(set_full_path: true)
+ end
- expect(repository.gitaly_repository_client).not_to receive(:write_config)
+ it_behaves_like '#set_full_path'
+ end
- expect do
- repository.write_config(full_path: 'foo/bar.git')
- end.to raise_error(Gitlab::Git::Repository::NoRepository)
+ context 'with :set_full_path disabled' do
+ before do
+ stub_feature_flags(set_full_path: false)
end
+
+ it_behaves_like '#set_full_path'
end
end
@@ -1813,34 +1812,6 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
end
- describe '#delete_config' do
- let(:repository) { mutable_repository }
- let(:entries) do
- {
- 'test.foo1' => 'bla bla',
- 'test.foo2' => 1234,
- 'test.foo3' => true
- }
- end
-
- it 'can delete config settings' do
- entries.each do |key, value|
- repository_rugged.config[key] = value
- end
-
- expect(repository.delete_config(*%w[does.not.exist test.foo1 test.foo2])).to be_nil
-
- # Workaround for https://github.com/libgit2/rugged/issues/785: If
- # Gitaly changes .gitconfig while Rugged has the file loaded
- # Rugged::Repository#each_key will report stale values unless a
- # lookup is done first.
- expect(repository_rugged.config['test.foo1']).to be_nil
- config_keys = repository_rugged.config.each_key.to_a
- expect(config_keys).not_to include('test.foo1')
- expect(config_keys).not_to include('test.foo2')
- end
- end
-
describe '#merge_to_ref' do
let(:repository) { mutable_repository }
let(:branch_head) { '6d394385cf567f80a8fd85055db1ab4c5295806f' }
@@ -2001,47 +1972,6 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
end
- describe 'remotes' do
- let(:repository) { mutable_repository }
- let(:remote_name) { 'my-remote' }
- let(:url) { 'http://my-repo.git' }
-
- after do
- ensure_seeds
- end
-
- describe '#add_remote' do
- let(:mirror_refmap) { '+refs/*:refs/*' }
-
- it 'added the remote' do
- begin
- repository_rugged.remotes.delete(remote_name)
- rescue Rugged::ConfigError
- end
-
- repository.add_remote(remote_name, url, mirror_refmap: mirror_refmap)
-
- expect(repository_rugged.remotes[remote_name]).not_to be_nil
- expect(repository_rugged.config["remote.#{remote_name}.mirror"]).to eq('true')
- expect(repository_rugged.config["remote.#{remote_name}.prune"]).to eq('true')
- expect(repository_rugged.config["remote.#{remote_name}.fetch"]).to eq(mirror_refmap)
- end
- end
-
- describe '#remove_remote' do
- it 'removes the remote' do
- repository_rugged.remotes.create(remote_name, url)
-
- expect(repository.remove_remote(remote_name)).to be true
-
- # Since we deleted the remote via Gitaly, Rugged doesn't know
- # this changed underneath it. Let's refresh the Rugged repo.
- repository_rugged = Rugged::Repository.new(repository_path)
- expect(repository_rugged.remotes[remote_name]).to be_nil
- end
- end
- end
-
describe '#bundle_to_disk' do
let(:save_path) { File.join(Dir.tmpdir, "repo-#{SecureRandom.hex}.bundle") }
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index b6ff76c5e1c..79ae47f8a7b 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Git::Tag, :seed_helper do
it { expect(tag.tagger.timezone).to eq("+0200") }
end
- describe 'signed tag' do
+ shared_examples 'signed tag' do
let(:project) { create(:project, :repository) }
let(:tag) { project.repository.find_tag('v1.1.1') }
@@ -54,6 +54,18 @@ RSpec.describe Gitlab::Git::Tag, :seed_helper do
it { expect(tag.tagger.timezone).to eq("+0100") }
end
+ context 'with :get_tag_signatures enabled' do
+ it_behaves_like 'signed tag'
+ end
+
+ context 'with :get_tag_signatures disabled' do
+ before do
+ stub_feature_flags(get_tag_signatures: false)
+ end
+
+ it_behaves_like 'signed tag'
+ end
+
it { expect(repository.tags.size).to eq(SeedRepo::Repo::TAGS.size) }
end
@@ -77,6 +89,75 @@ RSpec.describe Gitlab::Git::Tag, :seed_helper do
end
end
+ describe '.extract_signature_lazily' do
+ let(:project) { create(:project, :repository) }
+
+ subject { described_class.extract_signature_lazily(project.repository, tag_id).itself }
+
+ context 'when the tag is signed' do
+ let(:tag_id) { project.repository.find_tag('v1.1.1').id }
+
+ it 'returns signature and signed text' do
+ signature, signed_text = subject
+
+ expect(signature).to eq(X509Helpers::User1.signed_tag_signature.chomp)
+ expect(signature).to be_a_binary_string
+ expect(signed_text).to eq(X509Helpers::User1.signed_tag_base_data)
+ expect(signed_text).to be_a_binary_string
+ end
+ end
+
+ context 'when the tag has no signature' do
+ let(:tag_id) { project.repository.find_tag('v1.0.0').id }
+
+ it 'returns empty signature and message as signed text' do
+ signature, signed_text = subject
+
+ expect(signature).to be_empty
+ expect(signed_text).to eq(X509Helpers::User1.unsigned_tag_base_data)
+ expect(signed_text).to be_a_binary_string
+ end
+ end
+
+ context 'when the tag cannot be found' do
+ let(:tag_id) { Gitlab::Git::BLANK_SHA }
+
+ it 'raises GRPC::Internal' do
+ expect { subject }.to raise_error(GRPC::Internal)
+ end
+ end
+
+ context 'when the tag ID is invalid' do
+ let(:tag_id) { '4b4918a572fa86f9771e5ba40fbd48e' }
+
+ it 'raises GRPC::Internal' do
+ expect { subject }.to raise_error(GRPC::Internal)
+ end
+ end
+
+ context 'when loading signatures in batch once' do
+ it 'fetches signatures in batch once' do
+ tag_ids = [project.repository.find_tag('v1.1.1').id, project.repository.find_tag('v1.0.0').id]
+ signatures = tag_ids.map do |tag_id|
+ described_class.extract_signature_lazily(repository, tag_id)
+ end
+
+ other_repository = double(:repository)
+ described_class.extract_signature_lazily(other_repository, tag_ids.first)
+
+ expect(described_class).to receive(:batch_signature_extraction)
+ .with(repository, tag_ids)
+ .once
+ .and_return({})
+
+ expect(described_class).not_to receive(:batch_signature_extraction)
+ .with(other_repository, tag_ids.first)
+
+ 2.times { signatures.each(&:itself) }
+ end
+ end
+ end
+
describe 'tag into from Gitaly tag' do
context 'message_size != message.size' do
let(:gitaly_tag) { build(:gitaly_tag, message: ''.b, message_size: message_size) }
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index c44d7e44751..f11d84bd8d3 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -6,29 +6,44 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
shared_examples :repo do
- let(:tree) { Gitlab::Git::Tree.where(repository, SeedRepo::Commit::ID) }
+ subject(:tree) { Gitlab::Git::Tree.where(repository, sha, path, recursive, pagination_params) }
- it { expect(tree).to be_kind_of Array }
- it { expect(tree.empty?).to be_falsey }
- it { expect(tree.count(&:dir?)).to eq(2) }
- it { expect(tree.count(&:file?)).to eq(10) }
- it { expect(tree.count(&:submodule?)).to eq(2) }
+ let(:sha) { SeedRepo::Commit::ID }
+ let(:path) { nil }
+ let(:recursive) { false }
+ let(:pagination_params) { nil }
- it 'returns an empty array when called with an invalid ref' do
- expect(described_class.where(repository, 'foobar-does-not-exist')).to eq([])
+ let(:entries) { tree.first }
+ let(:cursor) { tree.second }
+
+ it { expect(entries).to be_kind_of Array }
+ it { expect(entries.empty?).to be_falsey }
+ it { expect(entries.count(&:dir?)).to eq(2) }
+ it { expect(entries.count(&:file?)).to eq(10) }
+ it { expect(entries.count(&:submodule?)).to eq(2) }
+ it { expect(cursor&.next_cursor).to be_blank }
+
+ context 'with an invalid ref' do
+ let(:sha) { 'foobar-does-not-exist' }
+
+ it { expect(entries).to eq([]) }
+ it { expect(cursor).to be_nil }
end
- it 'returns a list of tree objects' do
- entries = described_class.where(repository, SeedRepo::Commit::ID, 'files', true)
+ context 'when path is provided' do
+ let(:path) { 'files' }
+ let(:recursive) { true }
- expect(entries.map(&:path)).to include('files/html',
- 'files/markdown/ruby-style-guide.md')
- expect(entries.count).to be >= 10
- expect(entries).to all(be_a(Gitlab::Git::Tree))
+ it 'returns a list of tree objects' do
+ expect(entries.map(&:path)).to include('files/html',
+ 'files/markdown/ruby-style-guide.md')
+ expect(entries.count).to be >= 10
+ expect(entries).to all(be_a(Gitlab::Git::Tree))
+ end
end
describe '#dir?' do
- let(:dir) { tree.select(&:dir?).first }
+ let(:dir) { entries.select(&:dir?).first }
it { expect(dir).to be_kind_of Gitlab::Git::Tree }
it { expect(dir.id).to eq('3c122d2b7830eca25235131070602575cf8b41a1') }
@@ -41,7 +56,8 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
context :subdir do
# rubocop: disable Rails/FindBy
# This is not ActiveRecord where..first
- let(:subdir) { Gitlab::Git::Tree.where(repository, SeedRepo::Commit::ID, 'files').first }
+ let(:path) { 'files' }
+ let(:subdir) { entries.first }
# rubocop: enable Rails/FindBy
it { expect(subdir).to be_kind_of Gitlab::Git::Tree }
@@ -55,7 +71,8 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
context :subdir_file do
# rubocop: disable Rails/FindBy
# This is not ActiveRecord where..first
- let(:subdir_file) { Gitlab::Git::Tree.where(repository, SeedRepo::Commit::ID, 'files/ruby').first }
+ let(:path) { 'files/ruby' }
+ let(:subdir_file) { entries.first }
# rubocop: enable Rails/FindBy
it { expect(subdir_file).to be_kind_of Gitlab::Git::Tree }
@@ -68,10 +85,11 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
context :flat_path do
let(:filename) { 'files/flat/path/correct/content.txt' }
- let(:oid) { create_file(filename) }
+ let(:sha) { create_file(filename) }
+ let(:path) { 'files/flat' }
# rubocop: disable Rails/FindBy
# This is not ActiveRecord where..first
- let(:subdir_file) { Gitlab::Git::Tree.where(repository, oid, 'files/flat').first }
+ let(:subdir_file) { entries.first }
# rubocop: enable Rails/FindBy
let(:repository_rugged) { Rugged::Repository.new(File.join(SEED_STORAGE_PATH, TEST_REPO_PATH)) }
@@ -116,7 +134,7 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
end
describe '#file?' do
- let(:file) { tree.select(&:file?).first }
+ let(:file) { entries.select(&:file?).first }
it { expect(file).to be_kind_of Gitlab::Git::Tree }
it { expect(file.id).to eq('dfaa3f97ca337e20154a98ac9d0be76ddd1fcc82') }
@@ -125,21 +143,21 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
end
describe '#readme?' do
- let(:file) { tree.select(&:readme?).first }
+ let(:file) { entries.select(&:readme?).first }
it { expect(file).to be_kind_of Gitlab::Git::Tree }
it { expect(file.name).to eq('README.md') }
end
describe '#contributing?' do
- let(:file) { tree.select(&:contributing?).first }
+ let(:file) { entries.select(&:contributing?).first }
it { expect(file).to be_kind_of Gitlab::Git::Tree }
it { expect(file.name).to eq('CONTRIBUTING.md') }
end
describe '#submodule?' do
- let(:submodule) { tree.select(&:submodule?).first }
+ let(:submodule) { entries.select(&:submodule?).first }
it { expect(submodule).to be_kind_of Gitlab::Git::Tree }
it { expect(submodule.id).to eq('79bceae69cb5750d6567b223597999bfa91cb3b9') }
@@ -149,7 +167,16 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
end
describe '.where with Gitaly enabled' do
- it_behaves_like :repo
+ it_behaves_like :repo do
+ context 'with pagination parameters' do
+ let(:pagination_params) { { limit: 3, page_token: nil } }
+
+ it 'returns paginated list of tree objects' do
+ expect(entries.count).to eq(3)
+ expect(cursor.next_cursor).to be_present
+ end
+ end
+ end
end
describe '.where with Rugged enabled', :enable_rugged do
@@ -161,6 +188,15 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
described_class.where(repository, SeedRepo::Commit::ID, 'files', false)
end
- it_behaves_like :repo
+ it_behaves_like :repo do
+ context 'with pagination parameters' do
+ let(:pagination_params) { { limit: 3, page_token: nil } }
+
+ it 'does not support pagination' do
+ expect(entries.count).to be >= 10
+ expect(cursor).to be_nil
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index 3b85e3ddd1d..d690a4b2db4 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -382,12 +382,11 @@ RSpec.describe Gitlab::GitAccessSnippet do
it_behaves_like 'a push to repository to make it over the limit'
end
- shared_examples_for 'a change with GIT_OBJECT_DIRECTORY_RELATIVE env var unset' do
+ context 'a change with GIT_OBJECT_DIRECTORY_RELATIVE env var unset' do
let(:change_size) { 200 }
before do
- stub_feature_flags(git_access_batched_changes_size: batched)
- allow(snippet.repository).to receive(expected_call).and_return(
+ allow(snippet.repository).to receive(:blobs).and_return(
[double(:blob, size: change_size)]
)
end
@@ -396,20 +395,6 @@ RSpec.describe Gitlab::GitAccessSnippet do
it_behaves_like 'a push to repository below the limit'
it_behaves_like 'a push to repository to make it over the limit'
end
-
- context 'when batched computation is enabled' do
- let(:batched) { true }
- let(:expected_call) { :blobs }
-
- it_behaves_like 'a change with GIT_OBJECT_DIRECTORY_RELATIVE env var unset'
- end
-
- context 'when batched computation is disabled' do
- let(:batched) { false }
- let(:expected_call) { :new_blobs }
-
- it_behaves_like 'a change with GIT_OBJECT_DIRECTORY_RELATIVE env var unset'
- end
end
describe 'HEAD realignment' do
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 22c29403255..a0e2d43cf45 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -169,7 +169,11 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
end
describe '#tree_entries' do
+ subject { client.tree_entries(repository, revision, path, recursive, pagination_params) }
+
let(:path) { '/' }
+ let(:recursive) { false }
+ let(:pagination_params) { nil }
it 'sends a get_tree_entries message' do
expect_any_instance_of(Gitaly::CommitService::Stub)
@@ -177,7 +181,7 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
.and_return([])
- client.tree_entries(repository, revision, path, false)
+ is_expected.to eq([[], nil])
end
context 'with UTF-8 params strings' do
@@ -190,7 +194,26 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
.and_return([])
- client.tree_entries(repository, revision, path, false)
+ is_expected.to eq([[], nil])
+ end
+ end
+
+ context 'with pagination parameters' do
+ let(:pagination_params) { { limit: 3, page_token: nil } }
+
+ it 'responds with a pagination cursor' do
+ pagination_cursor = Gitaly::PaginationCursor.new(next_cursor: 'aabbccdd')
+ response = Gitaly::GetTreeEntriesResponse.new(
+ entries: [],
+ pagination_cursor: pagination_cursor
+ )
+
+ expect_any_instance_of(Gitaly::CommitService::Stub)
+ .to receive(:get_tree_entries)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return([response])
+
+ is_expected.to eq([[], pagination_cursor])
end
end
end
@@ -320,6 +343,92 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
end
end
+ describe '#list_new_commits' do
+ let(:revisions) { [revision] }
+ let(:gitaly_commits) { create_list(:gitaly_commit, 3) }
+ let(:commits) { gitaly_commits.map { |c| Gitlab::Git::Commit.new(repository, c) }}
+
+ subject { client.list_new_commits(revisions, allow_quarantine: allow_quarantine) }
+
+ shared_examples 'a #list_all_commits message' do
+ it 'sends a list_all_commits message' do
+ expected_repository = repository.gitaly_repository.dup
+ expected_repository.git_alternate_object_directories = Google::Protobuf::RepeatedField.new(:string)
+
+ expect_next_instance_of(Gitaly::CommitService::Stub) do |service|
+ expect(service).to receive(:list_all_commits)
+ .with(gitaly_request_with_params(repository: expected_repository), kind_of(Hash))
+ .and_return([Gitaly::ListAllCommitsResponse.new(commits: gitaly_commits)])
+ end
+
+ expect(subject).to eq(commits)
+ end
+ end
+
+ shared_examples 'a #list_commits message' do
+ it 'sends a list_commits message' do
+ expect_next_instance_of(Gitaly::CommitService::Stub) do |service|
+ expect(service).to receive(:list_commits)
+ .with(gitaly_request_with_params(revisions: revisions + %w[--not --all]), kind_of(Hash))
+ .and_return([Gitaly::ListCommitsResponse.new(commits: gitaly_commits)])
+ end
+
+ expect(subject).to eq(commits)
+ end
+ end
+
+ before do
+ ::Gitlab::GitalyClient.clear_stubs!
+
+ allow(Gitlab::Git::HookEnv)
+ .to receive(:all)
+ .with(repository.gl_repository)
+ .and_return(git_env)
+ end
+
+ context 'with hook environment' do
+ let(:git_env) do
+ {
+ 'GIT_OBJECT_DIRECTORY_RELATIVE' => '.git/objects',
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => ['/dir/one', '/dir/two']
+ }
+ end
+
+ context 'with allowed quarantine' do
+ let(:allow_quarantine) { true }
+
+ it_behaves_like 'a #list_all_commits message'
+ end
+
+ context 'with disallowed quarantine' do
+ let(:allow_quarantine) { false }
+
+ it_behaves_like 'a #list_commits message'
+ end
+ end
+
+ context 'without hook environment' do
+ let(:git_env) do
+ {
+ 'GIT_OBJECT_DIRECTORY_RELATIVE' => '',
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => []
+ }
+ end
+
+ context 'with allowed quarantine' do
+ let(:allow_quarantine) { true }
+
+ it_behaves_like 'a #list_commits message'
+ end
+
+ context 'with disallowed quarantine' do
+ let(:allow_quarantine) { false }
+
+ it_behaves_like 'a #list_commits message'
+ end
+ end
+ end
+
describe '#commit_stats' do
let(:request) do
Gitaly::CommitStatsRequest.new(
diff --git a/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb
index 0bb8628af6c..0eecdfcb630 100644
--- a/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb
@@ -9,22 +9,37 @@ RSpec.describe Gitlab::GitalyClient::ConflictFilesStitcher do
target_repository = target_project.repository.raw
target_gitaly_repository = target_repository.gitaly_repository
+ ancestor_path_1 = 'ancestor/path/1'
our_path_1 = 'our/path/1'
their_path_1 = 'their/path/1'
our_mode_1 = 0744
commit_oid_1 = 'f00'
content_1 = 'content of the first file'
+ ancestor_path_2 = 'ancestor/path/2'
our_path_2 = 'our/path/2'
their_path_2 = 'their/path/2'
our_mode_2 = 0600
commit_oid_2 = 'ba7'
content_2 = 'content of the second file'
- header_1 = double(repository: target_gitaly_repository, commit_oid: commit_oid_1,
- our_path: our_path_1, their_path: their_path_1, our_mode: our_mode_1)
- header_2 = double(repository: target_gitaly_repository, commit_oid: commit_oid_2,
- our_path: our_path_2, their_path: their_path_2, our_mode: our_mode_2)
+ header_1 = double(
+ repository: target_gitaly_repository,
+ commit_oid: commit_oid_1,
+ ancestor_path: ancestor_path_1,
+ our_path: our_path_1,
+ their_path: their_path_1,
+ our_mode: our_mode_1
+ )
+
+ header_2 = double(
+ repository: target_gitaly_repository,
+ commit_oid: commit_oid_2,
+ ancestor_path: ancestor_path_2,
+ our_path: our_path_2,
+ their_path: their_path_2,
+ our_mode: our_mode_2
+ )
messages = [
double(files: [double(header: header_1), double(header: nil, content: content_1[0..5])]),
@@ -39,6 +54,7 @@ RSpec.describe Gitlab::GitalyClient::ConflictFilesStitcher do
expect(conflict_files.size).to be(2)
expect(conflict_files[0].content).to eq(content_1)
+ expect(conflict_files[0].ancestor_path).to eq(ancestor_path_1)
expect(conflict_files[0].their_path).to eq(their_path_1)
expect(conflict_files[0].our_path).to eq(our_path_1)
expect(conflict_files[0].our_mode).to be(our_mode_1)
@@ -46,6 +62,7 @@ RSpec.describe Gitlab::GitalyClient::ConflictFilesStitcher do
expect(conflict_files[0].commit_oid).to eq(commit_oid_1)
expect(conflict_files[1].content).to eq(content_2)
+ expect(conflict_files[1].ancestor_path).to eq(ancestor_path_2)
expect(conflict_files[1].their_path).to eq(their_path_2)
expect(conflict_files[1].our_path).to eq(our_path_2)
expect(conflict_files[1].our_mode).to be(our_mode_2)
diff --git a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
index e90cb966917..89a41ae71f3 100644
--- a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
@@ -15,18 +15,31 @@ RSpec.describe Gitlab::GitalyClient::ConflictsService do
end
describe '#list_conflict_files' do
+ let(:allow_tree_conflicts) { false }
let(:request) do
Gitaly::ListConflictFilesRequest.new(
- repository: target_gitaly_repository, our_commit_oid: our_commit_oid,
- their_commit_oid: their_commit_oid
+ repository: target_gitaly_repository,
+ our_commit_oid: our_commit_oid,
+ their_commit_oid: their_commit_oid,
+ allow_tree_conflicts: allow_tree_conflicts
)
end
- it 'sends an RPC request' do
- expect_any_instance_of(Gitaly::ConflictsService::Stub).to receive(:list_conflict_files)
- .with(request, kind_of(Hash)).and_return([].to_enum)
+ shared_examples_for 'listing conflicts' do
+ it 'sends an RPC request' do
+ expect_any_instance_of(Gitaly::ConflictsService::Stub).to receive(:list_conflict_files)
+ .with(request, kind_of(Hash)).and_return([].to_enum)
+
+ client.list_conflict_files(allow_tree_conflicts: allow_tree_conflicts)
+ end
+ end
+
+ it_behaves_like 'listing conflicts'
+
+ context 'when allow_tree_conflicts is set to true' do
+ let(:allow_tree_conflicts) { true }
- client.list_conflict_files
+ it_behaves_like 'listing conflicts'
end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index a4c6e30bba8..e19be965e68 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -178,6 +178,17 @@ RSpec.describe Gitlab::GitalyClient::RefService do
end
end
+ describe '#get_tag_signatures' do
+ it 'sends a get_tag_signatures message' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:get_tag_signatures)
+ .with(gitaly_request_with_params(tag_revisions: ['some_tag_id']), kind_of(Hash))
+ .and_return([])
+
+ client.get_tag_signatures(['some_tag_id'])
+ end
+ end
+
describe '#find_ref_name', :seed_helper do
subject { client.find_ref_name(SeedRepo::Commit::ID, 'refs/heads/master') }
diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
index 2ec5f70be76..3d0f8358406 100644
--- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
@@ -6,36 +6,9 @@ RSpec.describe Gitlab::GitalyClient::RemoteService do
let(:project) { create(:project) }
let(:storage_name) { project.repository_storage }
let(:relative_path) { project.disk_path + '.git' }
- let(:remote_name) { 'my-remote' }
let(:client) { described_class.new(project.repository) }
- describe '#add_remote' do
- let(:url) { 'http://my-repo.git' }
- let(:mirror_refmap) { :all_refs }
-
- it 'sends an add_remote message' do
- expect_any_instance_of(Gitaly::RemoteService::Stub)
- .to receive(:add_remote)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(:add_remote_response))
-
- client.add_remote(remote_name, url, mirror_refmap)
- end
- end
-
- describe '#remove_remote' do
- it 'sends an remove_remote message and returns the result value' do
- expect_any_instance_of(Gitaly::RemoteService::Stub)
- .to receive(:remove_remote)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(result: true))
-
- expect(client.remove_remote(remote_name)).to be(true)
- end
- end
-
describe '#find_remote_root_ref' do
- let(:remote) { 'origin' }
let(:url) { 'http://git.example.com/my-repo.git' }
let(:auth) { 'Basic secret' }
let(:expected_params) { { remote_url: url, http_authorization_header: auth } }
@@ -47,7 +20,7 @@ RSpec.describe Gitlab::GitalyClient::RemoteService do
.with(gitaly_request_with_params(expected_params), kind_of(Hash))
.and_return(double(ref: 'master'))
- expect(client.find_remote_root_ref(remote, url, auth)).to eq 'master'
+ expect(client.find_remote_root_ref(url, auth)).to eq 'master'
end
it 'ensure ref is a valid UTF-8 string' do
@@ -57,39 +30,24 @@ RSpec.describe Gitlab::GitalyClient::RemoteService do
.with(gitaly_request_with_params(expected_params), kind_of(Hash))
.and_return(double(ref: "an_invalid_ref_\xE5"))
- expect(client.find_remote_root_ref(remote, url, auth)).to eq "an_invalid_ref_å"
+ expect(client.find_remote_root_ref(url, auth)).to eq "an_invalid_ref_å"
end
end
describe '#update_remote_mirror' do
- let(:ref_name) { 'remote_mirror_1' }
let(:only_branches_matching) { %w[my-branch master] }
let(:ssh_key) { 'KEY' }
let(:known_hosts) { 'KNOWN HOSTS' }
+ let(:url) { 'http:://git.example.com/my-repo.git' }
+ let(:expected_params) { { remote: Gitaly::UpdateRemoteMirrorRequest::Remote.new(url: url) } }
- shared_examples 'an update' do
- it 'sends an update_remote_mirror message' do
- expect_any_instance_of(Gitaly::RemoteService::Stub)
- .to receive(:update_remote_mirror)
- .with(array_including(gitaly_request_with_params(expected_params)), kind_of(Hash))
- .and_return(double(:update_remote_mirror_response))
-
- client.update_remote_mirror(ref_name, url, only_branches_matching, ssh_key: ssh_key, known_hosts: known_hosts, keep_divergent_refs: true)
- end
- end
-
- context 'with remote name' do
- let(:url) { nil }
- let(:expected_params) { { ref_name: ref_name } }
-
- it_behaves_like 'an update'
- end
-
- context 'with remote URL' do
- let(:url) { 'http:://git.example.com/my-repo.git' }
- let(:expected_params) { { remote: Gitaly::UpdateRemoteMirrorRequest::Remote.new(url: url) } }
+ it 'sends an update_remote_mirror message' do
+ expect_any_instance_of(Gitaly::RemoteService::Stub)
+ .to receive(:update_remote_mirror)
+ .with(array_including(gitaly_request_with_params(expected_params)), kind_of(Hash))
+ .and_return(double(:update_remote_mirror_response))
- it_behaves_like 'an update'
+ client.update_remote_mirror(url, only_branches_matching, ssh_key: ssh_key, known_hosts: known_hosts, keep_divergent_refs: true)
end
end
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 53805d67f9f..4b037d3f836 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -122,89 +122,75 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
describe '#fetch_remote' do
- shared_examples 'a fetch' do
- it 'sends a fetch_remote_request message' do
- expected_remote_params = Gitaly::Remote.new(
- url: url, http_authorization_header: "", mirror_refmaps: [])
-
- expected_request = gitaly_request_with_params(
- remote: remote,
- remote_params: url ? expected_remote_params : nil,
- ssh_key: '',
- known_hosts: '',
- force: false,
- no_tags: false,
- no_prune: false,
- check_tags_changed: false
- )
-
- expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:fetch_remote)
- .with(expected_request, kind_of(Hash))
- .and_return(double(value: true))
-
- client.fetch_remote(remote, url: url, refmap: nil, ssh_auth: nil, forced: false, no_tags: false, timeout: 1, check_tags_changed: false)
- end
+ let(:url) { 'https://example.com/git/repo.git' }
+
+ it 'sends a fetch_remote_request message' do
+ expected_request = gitaly_request_with_params(
+ remote_params: Gitaly::Remote.new(
+ url: url,
+ http_authorization_header: "",
+ mirror_refmaps: []
+ ),
+ ssh_key: '',
+ known_hosts: '',
+ force: false,
+ no_tags: false,
+ no_prune: false,
+ check_tags_changed: false
+ )
- context 'SSH auth' do
- where(:ssh_mirror_url, :ssh_key_auth, :ssh_private_key, :ssh_known_hosts, :expected_params) do
- false | false | 'key' | 'known_hosts' | {}
- false | true | 'key' | 'known_hosts' | {}
- true | false | 'key' | 'known_hosts' | { known_hosts: 'known_hosts' }
- true | true | 'key' | 'known_hosts' | { ssh_key: 'key', known_hosts: 'known_hosts' }
- true | true | 'key' | nil | { ssh_key: 'key' }
- true | true | nil | 'known_hosts' | { known_hosts: 'known_hosts' }
- true | true | nil | nil | {}
- true | true | '' | '' | {}
- end
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:fetch_remote)
+ .with(expected_request, kind_of(Hash))
+ .and_return(double(value: true))
- with_them do
- let(:ssh_auth) do
- double(
- :ssh_auth,
- ssh_mirror_url?: ssh_mirror_url,
- ssh_key_auth?: ssh_key_auth,
- ssh_private_key: ssh_private_key,
- ssh_known_hosts: ssh_known_hosts
- )
- end
-
- it do
- expected_remote_params = Gitaly::Remote.new(
- url: url, http_authorization_header: "", mirror_refmaps: [])
-
- expected_request = gitaly_request_with_params({
- remote: remote,
- remote_params: url ? expected_remote_params : nil,
- ssh_key: '',
- known_hosts: '',
- force: false,
- no_tags: false,
- no_prune: false
- }.update(expected_params))
-
- expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:fetch_remote)
- .with(expected_request, kind_of(Hash))
- .and_return(double(value: true))
-
- client.fetch_remote(remote, url: url, refmap: nil, ssh_auth: ssh_auth, forced: false, no_tags: false, timeout: 1)
- end
- end
- end
+ client.fetch_remote(url, refmap: nil, ssh_auth: nil, forced: false, no_tags: false, timeout: 1, check_tags_changed: false)
end
- context 'with remote' do
- it_behaves_like 'a fetch' do
- let(:remote) { 'remote-name' }
- let(:url) { nil }
+ context 'SSH auth' do
+ where(:ssh_mirror_url, :ssh_key_auth, :ssh_private_key, :ssh_known_hosts, :expected_params) do
+ false | false | 'key' | 'known_hosts' | {}
+ false | true | 'key' | 'known_hosts' | {}
+ true | false | 'key' | 'known_hosts' | { known_hosts: 'known_hosts' }
+ true | true | 'key' | 'known_hosts' | { ssh_key: 'key', known_hosts: 'known_hosts' }
+ true | true | 'key' | nil | { ssh_key: 'key' }
+ true | true | nil | 'known_hosts' | { known_hosts: 'known_hosts' }
+ true | true | nil | nil | {}
+ true | true | '' | '' | {}
end
- end
- context 'with URL' do
- it_behaves_like 'a fetch' do
- let(:remote) { "" }
- let(:url) { 'https://example.com/git/repo.git' }
+ with_them do
+ let(:ssh_auth) do
+ double(
+ :ssh_auth,
+ ssh_mirror_url?: ssh_mirror_url,
+ ssh_key_auth?: ssh_key_auth,
+ ssh_private_key: ssh_private_key,
+ ssh_known_hosts: ssh_known_hosts
+ )
+ end
+
+ it do
+ expected_request = gitaly_request_with_params({
+ remote_params: Gitaly::Remote.new(
+ url: url,
+ http_authorization_header: "",
+ mirror_refmaps: []
+ ),
+ ssh_key: '',
+ known_hosts: '',
+ force: false,
+ no_tags: false,
+ no_prune: false
+ }.update(expected_params))
+
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:fetch_remote)
+ .with(expected_request, kind_of(Hash))
+ .and_return(double(value: true))
+
+ client.fetch_remote(url, refmap: nil, ssh_auth: ssh_auth, forced: false, no_tags: false, timeout: 1)
+ end
end
end
end
@@ -333,4 +319,17 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
client.replicate(source_repository)
end
end
+
+ describe '#set_full_path' do
+ let(:path) { 'repo/path' }
+
+ it 'sends a set_full_path message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:set_full_path)
+ .with(gitaly_request_with_params(path: path), kind_of(Hash))
+ .and_return(double)
+
+ client.set_full_path(path)
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/bulk_importing_spec.rb b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
index 63dce51c5da..6c94973b5a8 100644
--- a/spec/lib/gitlab/github_import/bulk_importing_spec.rb
+++ b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
@@ -3,8 +3,20 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::BulkImporting do
- let(:importer) do
- Class.new { include(Gitlab::GithubImport::BulkImporting) }.new
+ let(:project) { instance_double(Project, id: 1) }
+ let(:importer) { MyImporter.new(project, double) }
+ let(:importer_class) do
+ Class.new do
+ include Gitlab::GithubImport::BulkImporting
+
+ def object_type
+ :object_type
+ end
+ end
+ end
+
+ before do
+ stub_const 'MyImporter', importer_class
end
describe '#build_database_rows' do
@@ -21,6 +33,24 @@ RSpec.describe Gitlab::GithubImport::BulkImporting do
.with(object)
.and_return(false)
+ expect(Gitlab::Import::Logger)
+ .to receive(:info)
+ .with(
+ import_type: :github,
+ project_id: 1,
+ importer: 'MyImporter',
+ message: '1 object_types fetched'
+ )
+
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment)
+ .with(
+ project,
+ :object_type,
+ :fetched,
+ value: 1
+ )
+
enum = [[object, 1]].to_enum
expect(importer.build_database_rows(enum)).to eq([{ title: 'Foo' }])
@@ -37,6 +67,24 @@ RSpec.describe Gitlab::GithubImport::BulkImporting do
.with(object)
.and_return(true)
+ expect(Gitlab::Import::Logger)
+ .to receive(:info)
+ .with(
+ import_type: :github,
+ project_id: 1,
+ importer: 'MyImporter',
+ message: '0 object_types fetched'
+ )
+
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment)
+ .with(
+ project,
+ :object_type,
+ :fetched,
+ value: 0
+ )
+
enum = [[object, 1]].to_enum
expect(importer.build_database_rows(enum)).to be_empty
@@ -48,12 +96,32 @@ RSpec.describe Gitlab::GithubImport::BulkImporting do
rows = [{ title: 'Foo' }] * 10
model = double(:model, table_name: 'kittens')
- expect(Gitlab::Database)
+ expect(Gitlab::Import::Logger)
+ .to receive(:info)
+ .twice
+ .with(
+ import_type: :github,
+ project_id: 1,
+ importer: 'MyImporter',
+ message: '5 object_types imported'
+ )
+
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment)
+ .twice
+ .with(
+ project,
+ :object_type,
+ :imported,
+ value: 5
+ )
+
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.ordered
.with('kittens', rows.first(5))
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.ordered
.with('kittens', rows.last(5))
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index 9eea85526f5..0af840d2c10 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter do
describe '#execute' do
context 'when the merge request no longer exists' do
it 'does not import anything' do
- expect(Gitlab::Database).not_to receive(:bulk_insert)
+ expect(Gitlab::Database.main).not_to receive(:bulk_insert)
importer.execute
end
@@ -58,7 +58,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter do
.to receive(:author_id_for)
.and_return([user.id, true])
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.with(
LegacyDiffNote.table_name,
@@ -89,7 +89,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter do
.to receive(:author_id_for)
.and_return([project.creator_id, false])
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.with(
LegacyDiffNote.table_name,
@@ -133,7 +133,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter do
.to receive(:author_id_for)
.and_return([project.creator_id, false])
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
index fb826c987e1..0926000428c 100644
--- a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -190,7 +190,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
.with(issue.assignees[1])
.and_return(5)
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.with(
IssueAssignee.table_name,
diff --git a/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
index 6d143f78c66..241a0fef600 100644
--- a/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LabelLinksImporter do
.and_return(1)
freeze_time do
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.with(
LabelLink.table_name,
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LabelLinksImporter do
.with('bug')
.and_return(nil)
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.with(LabelLink.table_name, [])
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index 8ee534734f0..a2c7d51214a 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do
- let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
+ let_it_be(:project) { create(:project, :import_started) }
+
let(:client) { double(:client) }
let(:download_link) { "http://www.gitlab.com/lfs_objects/oid" }
@@ -61,27 +62,12 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do
.and_raise(exception)
end
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:error)
- .with(
- message: 'importer failed',
- import_source: :github,
- project_id: project.id,
- parallel: false,
- importer: 'Gitlab::GithubImport::Importer::LfsObjectImporter',
- 'error.message': 'Invalid Project URL'
- )
- end
-
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
.with(
- exception,
- import_source: :github,
- parallel: false,
project_id: project.id,
- importer: 'Gitlab::GithubImport::Importer::LfsObjectImporter'
+ exception: exception,
+ error_source: 'Gitlab::GithubImport::Importer::LfsObjectImporter'
).and_call_original
importer.execute
diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
index ef0bb90db4a..820f46c7286 100644
--- a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
.with(github_note)
.and_return([user.id, true])
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.with(
Note.table_name,
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
.with(github_note)
.and_return([project.creator_id, false])
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.with(
Note.table_name,
@@ -115,7 +115,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
context 'when the noteable does not exist' do
it 'does not import the note' do
- expect(Gitlab::Database).not_to receive(:bulk_insert)
+ expect(Gitlab::Database.main).not_to receive(:bulk_insert)
importer.execute
end
@@ -134,7 +134,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
.with(github_note)
.and_return([user.id, true])
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index 133d515246a..067b8b09516 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -148,7 +148,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
end
end
- shared_examples '#update_repository' do
+ describe '#update_repository' do
it 'updates the repository' do
importer = described_class.new(project, client)
@@ -162,6 +162,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
.to receive(:increment)
.and_call_original
+ expect(project.repository)
+ .to receive(:fetch_remote)
+ .with(url, forced: false, refmap: Gitlab::GithubImport.refmap)
+
freeze_time do
importer.update_repository
@@ -170,28 +174,6 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
end
end
- describe '#update_repository with :fetch_remote_params enabled' do
- before do
- stub_feature_flags(fetch_remote_params: true)
- expect(project.repository)
- .to receive(:fetch_remote)
- .with('github', forced: false, url: url, refmap: Gitlab::GithubImport.refmap)
- end
-
- it_behaves_like '#update_repository'
- end
-
- describe '#update_repository with :fetch_remote_params disabled' do
- before do
- stub_feature_flags(fetch_remote_params: false)
- expect(project.repository)
- .to receive(:fetch_remote)
- .with('github', forced: false)
- end
-
- it_behaves_like '#update_repository'
- end
-
describe '#update_repository?' do
let(:importer) { described_class.new(project, client) }
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
index 08be350f0f9..c5fa67e50aa 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
@@ -27,100 +27,62 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
end
describe '#each_object_to_import', :clean_gitlab_redis_cache do
- context 'when github_review_importer_query_only_unimported_merge_requests is enabled' do
- before do
- stub_feature_flags(github_review_importer_query_only_unimported_merge_requests: true)
- end
-
- let(:merge_request) do
- create(
- :merged_merge_request,
- iid: 999,
- source_project: project,
- target_project: project
- )
- end
-
- let(:review) { double(id: 1) }
-
- it 'fetches the pull requests reviews data' do
- page = double(objects: [review], number: 1)
-
- expect(review)
- .to receive(:merge_request_id=)
- .with(merge_request.id)
-
- expect(client)
- .to receive(:each_page)
- .exactly(:once) # ensure to be cached on the second call
- .with(:pull_request_reviews, 'github/repo', merge_request.iid, page: 1)
- .and_yield(page)
+ let(:merge_request) do
+ create(
+ :merged_merge_request,
+ iid: 999,
+ source_project: project,
+ target_project: project
+ )
+ end
- expect { |b| subject.each_object_to_import(&b) }
- .to yield_with_args(review)
+ let(:review) { double(id: 1) }
- subject.each_object_to_import {}
- end
+ it 'fetches the pull requests reviews data' do
+ page = double(objects: [review], number: 1)
- it 'skips cached pages' do
- Gitlab::GithubImport::PageCounter
- .new(project, "merge_request/#{merge_request.id}/pull_request_reviews")
- .set(2)
+ expect(review)
+ .to receive(:merge_request_id=)
+ .with(merge_request.id)
- expect(review).not_to receive(:merge_request_id=)
+ expect(client)
+ .to receive(:each_page)
+ .exactly(:once) # ensure to be cached on the second call
+ .with(:pull_request_reviews, 'github/repo', merge_request.iid, page: 1)
+ .and_yield(page)
- expect(client)
- .to receive(:each_page)
- .exactly(:once) # ensure to be cached on the second call
- .with(:pull_request_reviews, 'github/repo', merge_request.iid, page: 2)
+ expect { |b| subject.each_object_to_import(&b) }
+ .to yield_with_args(review)
- subject.each_object_to_import {}
- end
+ subject.each_object_to_import {}
+ end
- it 'skips cached merge requests' do
- Gitlab::Cache::Import::Caching.set_add(
- "github-importer/merge_request/already-imported/#{project.id}",
- merge_request.id
- )
+ it 'skips cached pages' do
+ Gitlab::GithubImport::PageCounter
+ .new(project, "merge_request/#{merge_request.id}/pull_request_reviews")
+ .set(2)
- expect(review).not_to receive(:merge_request_id=)
+ expect(review).not_to receive(:merge_request_id=)
- expect(client).not_to receive(:each_page)
+ expect(client)
+ .to receive(:each_page)
+ .exactly(:once) # ensure to be cached on the second call
+ .with(:pull_request_reviews, 'github/repo', merge_request.iid, page: 2)
- subject.each_object_to_import {}
- end
+ subject.each_object_to_import {}
end
- context 'when github_review_importer_query_only_unimported_merge_requests is disabled' do
- before do
- stub_feature_flags(github_review_importer_query_only_unimported_merge_requests: false)
- end
-
- it 'fetchs the merged pull requests data' do
- merge_request = create(
- :merged_merge_request,
- iid: 999,
- source_project: project,
- target_project: project
- )
-
- review = double
-
- expect(review)
- .to receive(:merge_request_id=)
- .with(merge_request.id)
+ it 'skips cached merge requests' do
+ Gitlab::Cache::Import::Caching.set_add(
+ "github-importer/merge_request/already-imported/#{project.id}",
+ merge_request.id
+ )
- allow(client)
- .to receive(:pull_request_reviews)
- .exactly(:once) # ensure to be cached on the second call
- .with('github/repo', merge_request.iid)
- .and_return([review])
+ expect(review).not_to receive(:merge_request_id=)
- expect { |b| subject.each_object_to_import(&b) }
- .to yield_with_args(review)
+ expect(client).not_to receive(:each_page)
- subject.each_object_to_import {}
- end
+ subject.each_object_to_import {}
end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
index 3839303b881..58a8fb1b7e4 100644
--- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -202,7 +202,7 @@ RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
expect(repository)
.to receive(:fetch_as_mirror)
- .with(project.import_url, refmap: Gitlab::GithubImport.refmap, forced: true, remote_name: 'github')
+ .with(project.import_url, refmap: Gitlab::GithubImport.refmap, forced: true)
service = double
expect(Repositories::HousekeepingService)
@@ -211,17 +211,6 @@ RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
expect(importer.import_repository).to eq(true)
end
-
- it 'marks the import as failed when an error was raised' do
- expect(project).to receive(:ensure_repository)
- .and_raise(Gitlab::Git::Repository::NoRepository)
-
- expect(importer)
- .to receive(:fail_import)
- .and_return(false)
-
- expect(importer.import_repository).to eq(false)
- end
end
describe '#import_wiki_repository' do
@@ -234,28 +223,40 @@ RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
expect(importer.import_wiki_repository).to eq(true)
end
- it 'marks the import as failed and creates an empty repo if an error was raised' do
- expect(wiki_repository)
- .to receive(:import_repository)
- .with(importer.wiki_url)
- .and_raise(Gitlab::Git::CommandError)
+ context 'when it raises a Gitlab::Git::CommandError' do
+ context 'when the error is not a "repository not exported"' do
+ it 'creates the wiki and re-raise the exception' do
+ exception = Gitlab::Git::CommandError.new
- expect(importer)
- .to receive(:fail_import)
- .and_return(false)
+ expect(wiki_repository)
+ .to receive(:import_repository)
+ .with(importer.wiki_url)
+ .and_raise(exception)
- expect(project)
- .to receive(:create_wiki)
+ expect(project)
+ .to receive(:create_wiki)
- expect(importer.import_wiki_repository).to eq(false)
- end
- end
+ expect { importer.import_wiki_repository }
+ .to raise_error(exception)
+ end
+ end
+
+ context 'when the error is a "repository not exported"' do
+ it 'returns true' do
+ exception = Gitlab::Git::CommandError.new('repository not exported')
- describe '#fail_import' do
- it 'marks the import as failed' do
- expect(project.import_state).to receive(:mark_as_failed).with('foo')
+ expect(wiki_repository)
+ .to receive(:import_repository)
+ .with(importer.wiki_url)
+ .and_raise(exception)
- expect(importer.fail_import('foo')).to eq(false)
+ expect(project)
+ .not_to receive(:create_wiki)
+
+ expect(importer.import_wiki_repository)
+ .to eq(true)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/logger_spec.rb b/spec/lib/gitlab/github_import/logger_spec.rb
new file mode 100644
index 00000000000..6fd0f5db93e
--- /dev/null
+++ b/spec/lib/gitlab/github_import/logger_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Logger do
+ subject(:logger) { described_class.new('/dev/null') }
+
+ let(:now) { Time.zone.now }
+
+ describe '#format_message' do
+ before do
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('new-correlation-id')
+ end
+
+ it 'formats strings' do
+ output = subject.format_message('INFO', now, 'test', 'Hello world')
+
+ expect(Gitlab::Json.parse(output)).to eq({
+ 'severity' => 'INFO',
+ 'time' => now.utc.iso8601(3),
+ 'message' => 'Hello world',
+ 'correlation_id' => 'new-correlation-id',
+ 'feature_category' => 'importers',
+ 'import_type' => 'github'
+ })
+ end
+
+ it 'formats hashes' do
+ output = subject.format_message('INFO', now, 'test', { hello: 1 })
+
+ expect(Gitlab::Json.parse(output)).to eq({
+ 'severity' => 'INFO',
+ 'time' => now.utc.iso8601(3),
+ 'hello' => 1,
+ 'correlation_id' => 'new-correlation-id',
+ 'feature_category' => 'importers',
+ 'import_type' => 'github'
+ })
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/object_counter_spec.rb b/spec/lib/gitlab/github_import/object_counter_spec.rb
index 668c11667b5..c9e4ac67061 100644
--- a/spec/lib/gitlab/github_import/object_counter_spec.rb
+++ b/spec/lib/gitlab/github_import/object_counter_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do
it 'validates the operation being incremented' do
expect { described_class.increment(project, :issue, :unknown) }
- .to raise_error(ArgumentError, 'Operation must be fetched or imported')
+ .to raise_error(ArgumentError, 'operation must be fetched or imported')
end
it 'increments the counter and saves the key to be listed in the summary later' do
@@ -33,4 +33,20 @@ RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do
'imported' => { 'issue' => 2 }
})
end
+
+ it 'does not increment the counter if the given value is <= 0' do
+ expect(Gitlab::Metrics)
+ .not_to receive(:counter)
+
+ expect(Gitlab::Metrics)
+ .not_to receive(:counter)
+
+ described_class.increment(project, :issue, :fetched, value: 0)
+ described_class.increment(project, :issue, :imported, value: nil)
+
+ expect(described_class.summary(project)).to eq({
+ 'fetched' => {},
+ 'imported' => {}
+ })
+ end
end
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index d56d4708385..1fc7d3c887f 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::ParallelScheduling do
let(:importer_class) do
Class.new do
+ def self.name
+ 'MyImporter'
+ end
+
include(Gitlab::GithubImport::ParallelScheduling)
def importer_class
@@ -21,7 +25,8 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
end
end
- let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
+ let_it_be(:project) { create(:project, :import_started, import_source: 'foo/bar') }
+
let(:client) { double(:client) }
describe '#parallel?' do
@@ -79,73 +84,130 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
.to receive(:sequential_import)
.and_return([])
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'starting importer',
+ parallel: false,
+ project_id: project.id,
+ importer: 'Class'
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'importer finished',
+ parallel: false,
+ project_id: project.id,
+ importer: 'Class'
+ )
+
+ importer.execute
+ end
+
+ context 'when abort_on_failure is false' do
+ it 'logs the error when it fails' do
+ exception = StandardError.new('some error')
+
+ importer = importer_class.new(project, client, parallel: false)
+
+ expect(importer)
+ .to receive(:sequential_import)
+ .and_raise(exception)
+
+ expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
message: 'starting importer',
- import_source: :github,
parallel: false,
project_id: project.id,
importer: 'Class'
)
- expect(logger)
- .to receive(:info)
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
.with(
- message: 'importer finished',
- import_source: :github,
- parallel: false,
project_id: project.id,
- importer: 'Class'
- )
- end
+ exception: exception,
+ error_source: 'MyImporter',
+ fail_import: false
+ ).and_call_original
- importer.execute
+ expect { importer.execute }
+ .to raise_error(exception)
+
+ expect(project.import_state.reload.status).to eq('started')
+
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
+ end
end
- it 'logs the error when it fails' do
- exception = StandardError.new('some error')
+ context 'when abort_on_failure is true' do
+ let(:importer_class) do
+ Class.new do
+ def self.name
+ 'MyImporter'
+ end
- importer = importer_class.new(project, client, parallel: false)
+ include(Gitlab::GithubImport::ParallelScheduling)
- expect(importer)
- .to receive(:sequential_import)
- .and_raise(exception)
+ def importer_class
+ Class
+ end
+
+ def object_type
+ :dummy
+ end
+
+ def collection_method
+ :issues
+ end
+
+ def abort_on_failure
+ true
+ end
+ end
+ end
+
+ it 'logs the error when it fails and marks import as failed' do
+ exception = StandardError.new('some error')
+
+ importer = importer_class.new(project, client, parallel: false)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
+ expect(importer)
+ .to receive(:sequential_import)
+ .and_raise(exception)
+
+ expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
message: 'starting importer',
- import_source: :github,
parallel: false,
project_id: project.id,
importer: 'Class'
)
- expect(logger)
- .to receive(:error)
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
.with(
- message: 'importer failed',
- import_source: :github,
project_id: project.id,
- parallel: false,
- importer: 'Class',
- 'error.message': 'some error'
- )
- end
+ exception: exception,
+ error_source: 'MyImporter',
+ fail_import: true
+ ).and_call_original
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(
- exception,
- import_source: :github,
- parallel: false,
- project_id: project.id,
- importer: 'Class'
- )
- .and_call_original
+ expect { importer.execute }
+ .to raise_error(exception)
+
+ expect(project.import_state.reload.status).to eq('failed')
+ expect(project.import_state.last_error).to eq('some error')
- expect { importer.execute }.to raise_error(exception)
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index 20e67a784e1..f81fa3b1e2e 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -3,7 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
- let(:project) { create(:project) }
+ let(:project) do
+ create(
+ :project,
+ import_type: 'github',
+ import_url: 'https://api.github.com/user/repo'
+ )
+ end
+
let(:client) { double(:client) }
let(:finder) { described_class.new(project, client) }
@@ -263,6 +270,26 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
finder.id_for_github_id(id)
end
+
+ context 'when importing from github enterprise' do
+ let(:project) do
+ create(
+ :project,
+ import_type: 'github',
+ import_url: 'https://othergithub.net/user/repo'
+ )
+ end
+
+ it 'does not look up the user by external id' do
+ expect(finder).not_to receive(:query_id_for_github_id)
+
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:write)
+ .with(described_class::ID_CACHE_KEY % id, nil)
+
+ finder.id_for_github_id(id)
+ end
+ end
end
describe '#id_for_github_email' do
diff --git a/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb
index 95e3af34174..641fb27a071 100644
--- a/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb
@@ -3,26 +3,23 @@
require 'spec_helper'
RSpec.describe Gitlab::GrapeLogging::Loggers::PerfLogger do
- subject { described_class.new }
+ let(:mock_request) { OpenStruct.new(env: {}) }
describe ".parameters" do
- let(:mock_request) { OpenStruct.new(env: {}) }
+ subject { described_class.new.parameters(mock_request, nil) }
- describe 'when no performance datais are present' do
- it 'returns an empty Hash' do
- expect(subject.parameters(mock_request, nil)).to eq({})
- end
+ let(:perf_data) { { redis_calls: 1 } }
+
+ describe 'when no performance data present' do
+ it { is_expected.not_to include(perf_data) }
end
- describe 'when Redis calls are present', :request_store do
- it 'returns a Hash with Redis information' do
+ describe 'when performance data present', :request_store do
+ before do
Gitlab::Redis::SharedState.with { |redis| redis.get('perf-logger-test') }
-
- payload = subject.parameters(mock_request, nil)
-
- expect(payload[:redis_calls]).to eq(1)
- expect(payload[:redis_duration_s]).to be >= 0
end
+
+ it { is_expected.to include(perf_data) }
end
end
end
diff --git a/spec/lib/gitlab/graphql/calls_gitaly/field_extension_spec.rb b/spec/lib/gitlab/graphql/calls_gitaly/field_extension_spec.rb
index 1d8849f7e38..33f49dbc8d4 100644
--- a/spec/lib/gitlab/graphql/calls_gitaly/field_extension_spec.rb
+++ b/spec/lib/gitlab/graphql/calls_gitaly/field_extension_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Graphql::CallsGitaly::FieldExtension, :request_store do
let(:field_args) { {} }
let(:owner) { fresh_object_type }
let(:field) do
- ::Types::BaseField.new(name: 'value', type: GraphQL::STRING_TYPE, null: true, owner: owner, **field_args)
+ ::Types::BaseField.new(name: 'value', type: GraphQL::Types::String, null: true, owner: owner, **field_args)
end
def resolve_value
diff --git a/spec/lib/gitlab/graphql/copy_field_description_spec.rb b/spec/lib/gitlab/graphql/copy_field_description_spec.rb
index 310b4046b56..84aa548f2cf 100644
--- a/spec/lib/gitlab/graphql/copy_field_description_spec.rb
+++ b/spec/lib/gitlab/graphql/copy_field_description_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Graphql::CopyFieldDescription do
Class.new(Types::BaseObject) do
graphql_name "TestType"
- field :field_name, GraphQL::STRING_TYPE, null: true, description: 'Foo'
+ field :field_name, GraphQL::Types::String, null: true, description: 'Foo'
end
end
diff --git a/spec/lib/gitlab/graphql/markdown_field_spec.rb b/spec/lib/gitlab/graphql/markdown_field_spec.rb
index 44ca23f547c..a3fb0bbbed8 100644
--- a/spec/lib/gitlab/graphql/markdown_field_spec.rb
+++ b/spec/lib/gitlab/graphql/markdown_field_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
expect(field.name).to eq('testHtml')
expect(field.description).to eq('The GitLab Flavored Markdown rendering of `hello`')
- expect(field.type).to eq(GraphQL::STRING_TYPE)
+ expect(field.type).to eq(GraphQL::Types::String)
expect(field.to_graphql.complexity).to eq(5)
end
diff --git a/spec/lib/gitlab/graphql/mount_mutation_spec.rb b/spec/lib/gitlab/graphql/mount_mutation_spec.rb
index d6b932e08d2..fe25e923506 100644
--- a/spec/lib/gitlab/graphql/mount_mutation_spec.rb
+++ b/spec/lib/gitlab/graphql/mount_mutation_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe Gitlab::Graphql::MountMutation do
Class.new(Mutations::BaseMutation) do
graphql_name 'TestMutation'
- argument :foo, GraphQL::STRING_TYPE, required: false
- field :bar, GraphQL::STRING_TYPE, null: true
+ argument :foo, GraphQL::Types::String, required: false
+ field :bar, GraphQL::Types::String, null: true
end
end
diff --git a/spec/lib/gitlab/graphql/negatable_arguments_spec.rb b/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
index bc6e25eb018..71ef75836c0 100644
--- a/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
+++ b/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::Graphql::NegatableArguments do
it 'defines any arguments passed as block' do
test_resolver.negated do
- argument :foo, GraphQL::STRING_TYPE, required: false
+ argument :foo, GraphQL::Types::String, required: false
end
expect(test_resolver.arguments['not'].type.arguments.keys).to match_array(['foo'])
@@ -27,10 +27,10 @@ RSpec.describe Gitlab::Graphql::NegatableArguments do
it 'defines all arguments passed as block even if called multiple times' do
test_resolver.negated do
- argument :foo, GraphQL::STRING_TYPE, required: false
+ argument :foo, GraphQL::Types::String, required: false
end
test_resolver.negated do
- argument :bar, GraphQL::STRING_TYPE, required: false
+ argument :bar, GraphQL::Types::String, required: false
end
expect(test_resolver.arguments['not'].type.arguments.keys).to match_array(%w[foo bar])
diff --git a/spec/lib/gitlab/graphql/pagination/connections_spec.rb b/spec/lib/gitlab/graphql/pagination/connections_spec.rb
index e89e5c17644..f3f59113c81 100644
--- a/spec/lib/gitlab/graphql/pagination/connections_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/connections_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe ::Gitlab::Graphql::Pagination::Connections do
let(:node_type) do
Class.new(::GraphQL::Schema::Object) do
graphql_name 'Node'
- field :value, GraphQL::INT_TYPE, null: false
+ field :value, GraphQL::Types::Int, null: false
end
end
diff --git a/spec/lib/gitlab/graphql/present/field_extension_spec.rb b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
index 6ea313d30b3..5f0f444e0bb 100644
--- a/spec/lib/gitlab/graphql/present/field_extension_spec.rb
+++ b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
let(:owner) { fresh_object_type }
let(:field_name) { 'value' }
let(:field) do
- ::Types::BaseField.new(name: field_name, type: GraphQL::STRING_TYPE, null: true, owner: owner)
+ ::Types::BaseField.new(name: field_name, type: GraphQL::Types::String, null: true, owner: owner)
end
let(:base_presenter) do
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
Module.new do
include ::Types::BaseInterface
- field :interface_field, GraphQL::STRING_TYPE, null: true
+ field :interface_field, GraphQL::Types::String, null: true
end
end
@@ -58,7 +58,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
end
it 'resolves the interface field using the implementation from the presenter' do
- field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::STRING_TYPE, null: true, owner: interface)
+ field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::Types::String, null: true, owner: interface)
value = resolve_field(field, object, object_type: implementation)
expect(value).to eq 'made of concrete'
@@ -67,7 +67,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
context 'when the implementation is inherited' do
it 'resolves the interface field using the implementation from the presenter' do
subclass = Class.new(implementation) { graphql_name 'Subclass' }
- field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::STRING_TYPE, null: true, owner: interface)
+ field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::Types::String, null: true, owner: interface)
value = resolve_field(field, object, object_type: subclass)
expect(value).to eq 'made of concrete'
@@ -79,8 +79,8 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
def parent
type = fresh_object_type('Parent')
type.present_using(provide_foo)
- type.field :foo, ::GraphQL::INT_TYPE, null: true
- type.field :value, ::GraphQL::STRING_TYPE, null: true
+ type.field :foo, ::GraphQL::Types::Int, null: true
+ type.field :value, ::GraphQL::Types::String, null: true
type
end
@@ -88,7 +88,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
type = Class.new(parent)
type.graphql_name 'Child'
type.present_using(provide_bar)
- type.field :bar, ::GraphQL::INT_TYPE, null: true
+ type.field :bar, ::GraphQL::Types::Int, null: true
type
end
@@ -150,7 +150,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
let(:field) do
::Types::BaseField.new(
name: field_name,
- type: GraphQL::STRING_TYPE,
+ type: GraphQL::Types::String,
null: true,
owner: owner,
resolve: ->(obj, args, ctx) { 'Hello from a proc' }
diff --git a/spec/lib/gitlab/graphql/queries_spec.rb b/spec/lib/gitlab/graphql/queries_spec.rb
index a1cd2cdb2de..8b7f4ca7933 100644
--- a/spec/lib/gitlab/graphql/queries_spec.rb
+++ b/spec/lib/gitlab/graphql/queries_spec.rb
@@ -21,30 +21,30 @@ RSpec.describe Gitlab::Graphql::Queries do
let_it_be(:schema) do
author = Class.new(GraphQL::Schema::Object) do
graphql_name 'Author'
- field :name, GraphQL::STRING_TYPE, null: true
- field :handle, GraphQL::STRING_TYPE, null: false
- field :verified, GraphQL::BOOLEAN_TYPE, null: false
+ field :name, GraphQL::Types::String, null: true
+ field :handle, GraphQL::Types::String, null: false
+ field :verified, GraphQL::Types::Boolean, null: false
end
post = Class.new(GraphQL::Schema::Object) do
graphql_name 'Post'
- field :name, GraphQL::STRING_TYPE, null: false
- field :title, GraphQL::STRING_TYPE, null: false
- field :content, GraphQL::STRING_TYPE, null: true
+ field :name, GraphQL::Types::String, null: false
+ field :title, GraphQL::Types::String, null: false
+ field :content, GraphQL::Types::String, null: true
field :author, author, null: false
end
author.field :posts, [post], null: false do
- argument :blog_title, GraphQL::STRING_TYPE, required: false
+ argument :blog_title, GraphQL::Types::String, required: false
end
blog = Class.new(GraphQL::Schema::Object) do
graphql_name 'Blog'
- field :title, GraphQL::STRING_TYPE, null: false
- field :description, GraphQL::STRING_TYPE, null: false
+ field :title, GraphQL::Types::String, null: false
+ field :description, GraphQL::Types::String, null: false
field :main_author, author, null: false
field :posts, [post], null: false
field :post, post, null: true do
- argument :slug, GraphQL::STRING_TYPE, required: true
+ argument :slug, GraphQL::Types::String, required: true
end
end
@@ -52,10 +52,10 @@ RSpec.describe Gitlab::Graphql::Queries do
query(Class.new(GraphQL::Schema::Object) do
graphql_name 'Query'
field :blog, blog, null: true do
- argument :title, GraphQL::STRING_TYPE, required: true
+ argument :title, GraphQL::Types::String, required: true
end
field :post, post, null: true do
- argument :slug, GraphQL::STRING_TYPE, required: true
+ argument :slug, GraphQL::Types::String, required: true
end
end)
end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index ccb3ae1018a..1f06019c929 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -50,16 +50,9 @@ RSpec.describe Gitlab::Highlight do
let(:result) { described_class.highlight(file_name, content) } # content is 44 bytes
before do
- stub_feature_flags(one_megabyte_file_size_limit: false)
stub_config(extra: { 'maximum_text_highlight_size_kilobytes' => 0.0001 } ) # 1.024 bytes
end
- it 'confirm file size is 1MB when `one_megabyte_file_size_limit` is enabled' do
- stub_feature_flags(one_megabyte_file_size_limit: true)
- expect(described_class.too_large?(1024.kilobytes)).to eq(false)
- expect(described_class.too_large?(1025.kilobytes)).to eq(true)
- end
-
it 'increments the metric for oversized files' do
expect { result }.to change { over_highlight_size_limit('file size: 0.0001') }.by(1)
end
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 71e80de9f89..d0aae2ac475 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Gitlab::HTTP do
WebMock.stub_request(:post, /.*/).to_return do |request|
sleep 0.002.seconds
- { body: 'I\m slow', status: 200 }
+ { body: 'I\'m slow', status: 200 }
end
end
@@ -41,25 +41,67 @@ RSpec.describe Gitlab::HTTP do
subject(:request_slow_responder) { described_class.post('http://example.org', **options) }
- specify do
- expect { request_slow_responder }.not_to raise_error
+ shared_examples 'tracks the timeout but does not raise an error' do
+ specify :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(Gitlab::HTTP::ReadTotalTimeout)
+ ).once
+
+ expect { request_slow_responder }.not_to raise_error
+ end
+
+ it 'still calls the block' do
+ expect { |b| described_class.post('http://example.org', **options, &b) }.to yield_with_args
+ end
end
- context 'with use_read_total_timeout option' do
+ shared_examples 'does not track or raise timeout error' do
+ specify :aggregate_failures do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ expect { request_slow_responder }.not_to raise_error
+ end
+ end
+
+ it_behaves_like 'tracks the timeout but does not raise an error'
+
+ context 'and use_read_total_timeout option is truthy' do
let(:options) { { use_read_total_timeout: true } }
- it 'raises a timeout error' do
+ it 'raises an error' do
expect { request_slow_responder }.to raise_error(Gitlab::HTTP::ReadTotalTimeout, /Request timed out after ?([0-9]*[.])?[0-9]+ seconds/)
end
+ end
- context 'and timeout option' do
- let(:options) { { use_read_total_timeout: true, timeout: 10.seconds } }
+ context 'and timeout option is greater than DEFAULT_READ_TOTAL_TIMEOUT' do
+ let(:options) { { timeout: 10.seconds } }
- it 'overrides the default timeout when timeout option is present' do
- expect { request_slow_responder }.not_to raise_error
- end
+ it_behaves_like 'does not track or raise timeout error'
+ end
+
+ context 'and stream_body option is truthy' do
+ let(:options) { { stream_body: true } }
+
+ it_behaves_like 'does not track or raise timeout error'
+
+ context 'but skip_read_total_timeout option is falsey' do
+ let(:options) { { stream_body: true, skip_read_total_timeout: false } }
+
+ it_behaves_like 'tracks the timeout but does not raise an error'
end
end
+
+ context 'and skip_read_total_timeout option is truthy' do
+ let(:options) { { skip_read_total_timeout: true } }
+
+ it_behaves_like 'does not track or raise timeout error'
+ end
+
+ context 'and skip_read_total_timeout option is falsey' do
+ let(:options) { { skip_read_total_timeout: false } }
+
+ it_behaves_like 'tracks the timeout but does not raise an error'
+ end
end
it 'calls a block' do
diff --git a/spec/lib/gitlab/import/database_helpers_spec.rb b/spec/lib/gitlab/import/database_helpers_spec.rb
index d56e05df5d7..079faed2518 100644
--- a/spec/lib/gitlab/import/database_helpers_spec.rb
+++ b/spec/lib/gitlab/import/database_helpers_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Import::DatabaseHelpers do
let(:project) { create(:project) }
it 'returns the ID returned by the query' do
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.with(Issue.table_name, [attributes], return_ids: true)
.and_return([10])
diff --git a/spec/lib/gitlab/import/import_failure_service_spec.rb b/spec/lib/gitlab/import/import_failure_service_spec.rb
new file mode 100644
index 00000000000..50b32d634ad
--- /dev/null
+++ b/spec/lib/gitlab/import/import_failure_service_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Import::ImportFailureService do
+ let_it_be(:import_type) { 'import_type' }
+
+ let_it_be(:project) do
+ create(
+ :project,
+ :import_started,
+ import_type: import_type
+ )
+ end
+
+ let(:import_state) { project.import_state }
+ let(:exception) { StandardError.new('some error') }
+
+ shared_examples 'logs the exception and fails the import' do
+ it 'when the failure aborts the import' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ exception,
+ project_id: project.id,
+ import_type: import_type,
+ source: 'SomeImporter'
+ )
+
+ expect(Gitlab::Import::Logger)
+ .to receive(:error)
+ .with(
+ message: 'importer failed',
+ 'error.message': 'some error',
+ project_id: project.id,
+ import_type: import_type,
+ source: 'SomeImporter'
+ )
+
+ described_class.track(**arguments)
+
+ expect(project.import_state.reload.status).to eq('failed')
+
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
+ end
+ end
+
+ shared_examples 'logs the exception and does not fail the import' do
+ it 'when the failure does not abort the import' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ exception,
+ project_id: project.id,
+ import_type: import_type,
+ source: 'SomeImporter'
+ )
+
+ expect(Gitlab::Import::Logger)
+ .to receive(:error)
+ .with(
+ message: 'importer failed',
+ 'error.message': 'some error',
+ project_id: project.id,
+ import_type: import_type,
+ source: 'SomeImporter'
+ )
+
+ described_class.track(**arguments)
+
+ expect(project.import_state.reload.status).to eq('started')
+
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
+ end
+ end
+
+ context 'when using the project as reference' do
+ context 'when it fails the import' do
+ let(:arguments) do
+ {
+ project_id: project.id,
+ exception: exception,
+ error_source: 'SomeImporter',
+ fail_import: true
+ }
+ end
+
+ it_behaves_like 'logs the exception and fails the import'
+ end
+
+ context 'when it does not fail the import' do
+ let(:arguments) do
+ {
+ project_id: project.id,
+ exception: exception,
+ error_source: 'SomeImporter',
+ fail_import: false
+ }
+ end
+
+ it_behaves_like 'logs the exception and does not fail the import'
+ end
+ end
+
+ context 'when using the import_state as reference' do
+ context 'when it fails the import' do
+ let(:arguments) do
+ {
+ import_state: import_state,
+ exception: exception,
+ error_source: 'SomeImporter',
+ fail_import: true
+ }
+ end
+
+ it_behaves_like 'logs the exception and fails the import'
+ end
+
+ context 'when it does not fail the import' do
+ let(:arguments) do
+ {
+ import_state: import_state,
+ exception: exception,
+ error_source: 'SomeImporter',
+ fail_import: false
+ }
+ end
+
+ it_behaves_like 'logs the exception and does not fail the import'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import/logger_spec.rb b/spec/lib/gitlab/import/logger_spec.rb
new file mode 100644
index 00000000000..60978aaa25c
--- /dev/null
+++ b/spec/lib/gitlab/import/logger_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Import::Logger do
+ subject { described_class.new('/dev/null') }
+
+ let(:now) { Time.zone.now }
+
+ describe '#format_message' do
+ before do
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('new-correlation-id')
+ end
+
+ it 'formats strings' do
+ output = subject.format_message('INFO', now, 'test', 'Hello world')
+
+ expect(Gitlab::Json.parse(output)).to eq({
+ 'severity' => 'INFO',
+ 'time' => now.utc.iso8601(3),
+ 'message' => 'Hello world',
+ 'correlation_id' => 'new-correlation-id',
+ 'feature_category' => 'importers'
+ })
+ end
+
+ it 'formats hashes' do
+ output = subject.format_message('INFO', now, 'test', { hello: 1 })
+
+ expect(Gitlab::Json.parse(output)).to eq({
+ 'severity' => 'INFO',
+ 'time' => now.utc.iso8601(3),
+ 'hello' => 1,
+ 'correlation_id' => 'new-correlation-id',
+ 'feature_category' => 'importers'
+ })
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 78805cea66a..2b7138a7a10 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -7,6 +7,7 @@ issues:
- updated_by
- milestone
- iteration
+- work_item_type
- notes
- resource_label_events
- resource_weight_events
@@ -56,6 +57,9 @@ issues:
- issue_email_participants
- test_reports
- requirement
+- incident_management_issuable_escalation_status
+work_item_type:
+- issues
events:
- author
- project
@@ -461,7 +465,6 @@ project:
- file_uploads
- import_state
- members_and_requesters
-- build_trace_section_names
- build_trace_chunks
- job_artifacts
- root_of_fork_network
@@ -579,6 +582,7 @@ project:
- security_orchestration_policy_configuration
- timelogs
- error_tracking_errors
+- error_tracking_client_keys
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index 29b192de809..fc08a13a8bd 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -190,7 +190,7 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer do
end
it 'does not complain about non UTF-8 characters in MR diff files' do
- ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
+ MergeRequest.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
expect(subject['merge_requests'].first['merge_request_diff']).not_to be_empty
end
diff --git a/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb
index bfcd4994995..dbd6cb243f6 100644
--- a/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::ImportExport::Group::LegacyTreeRestorer do
let(:group) { create(:group) }
let(:shared) { Gitlab::ImportExport::Shared.new(group) }
let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group, group_hash: nil) }
- let(:group_json) { ActiveSupport::JSON.decode(IO.read(File.join(shared.export_path, 'group.json'))) }
+ let(:group_json) { Gitlab::Json.parse(IO.read(File.join(shared.export_path, 'group.json'))) }
shared_examples 'excluded attributes' do
excluded_attributes = %w[
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
index d2153221e8f..b67d42d1b71 100644
--- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe Gitlab::ImportExport::Group::TreeRestorer do
let(:shared) { Gitlab::ImportExport::Shared.new(group) }
let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group) }
let(:exported_file) { File.join(shared.export_path, 'tree/groups/4352.json') }
- let(:group_json) { ActiveSupport::JSON.decode(IO.read(exported_file)) }
+ let(:group_json) { Gitlab::Json.parse(IO.read(exported_file)) }
shared_examples 'excluded attributes' do
excluded_attributes = %w[
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index 9c6d2708607..90966cb4915 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe 'Test coverage of the Project Import' do
end
def relations_from_json(json_file)
- json = ActiveSupport::JSON.decode(IO.read(json_file))
+ json = Gitlab::Json.parse(IO.read(json_file))
[].tap {|res| gather_relations({ project: json }, res, [])}
.map {|relation_names| relation_names.join('.')}
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index deb22de9160..9e30564b437 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -156,6 +156,41 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
subject.execute
end
end
+
+ describe 'load balancing' do
+ context 'when feature flag load_balancing_for_export_workers is enabled' do
+ before do
+ stub_feature_flags(load_balancing_for_export_workers: true)
+ end
+
+ context 'when enabled', :db_load_balancing do
+ it 'reads from replica' do
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
+
+ subject.execute
+ end
+ end
+
+ context 'when disabled' do
+ it 'reads from primary' do
+ allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false)
+ expect(Gitlab::Database::LoadBalancing::Session.current).not_to receive(:use_replicas_for_read_queries)
+
+ subject.execute
+ end
+ end
+ end
+
+ context 'when feature flag load_balancing_for_export_workers is disabled' do
+ it 'reads from primary' do
+ stub_feature_flags(load_balancing_for_export_workers: false)
+
+ expect(Gitlab::Database::LoadBalancing::Session.current).not_to receive(:use_replicas_for_read_queries)
+
+ subject.execute
+ end
+ end
+ end
end
describe '.batch_size' do
diff --git a/spec/lib/gitlab/import_export/members_mapper_spec.rb b/spec/lib/gitlab/import_export/members_mapper_spec.rb
index 9755e322221..04c27b6f8ad 100644
--- a/spec/lib/gitlab/import_export/members_mapper_spec.rb
+++ b/spec/lib/gitlab/import_export/members_mapper_spec.rb
@@ -165,11 +165,10 @@ RSpec.describe Gitlab::ImportExport::MembersMapper do
let(:member_class) { ProjectMember }
let(:importable) { create(:project, :public, name: 'searchable_project') }
- it 'authorizes the users to the project' do
+ it 'adds users to project members' do
members_mapper.map
- expect(user.authorized_project?(importable)).to be true
- expect(user2.authorized_project?(importable)).to be true
+ expect(importable.reload.members.map(&:user)).to include(user, user2)
end
it 'maps an owner as a maintainer' do
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
index fd6c66a10a7..bee7c59cab0 100644
--- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -386,7 +386,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
end
it 'does not complain about non UTF-8 characters in MR diff files' do
- ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
+ MergeRequestDiffFile.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
expect(project_tree_saver.save).to be true
end
diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
index 09280402e2b..cd1828791c3 100644
--- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
@@ -111,4 +111,35 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
end
end
end
+
+ context 'when a command takes longer than DURATION_ERROR_THRESHOLD' do
+ let(:threshold) { 0.5 }
+
+ before do
+ stub_const("#{described_class}::DURATION_ERROR_THRESHOLD", threshold)
+ end
+
+ context 'when report_on_long_redis_durations is disabled' do
+ it 'does nothing' do
+ stub_feature_flags(report_on_long_redis_durations: false)
+
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ Gitlab::Redis::SharedState.with { |r| r.mget('foo', 'foo') { sleep threshold + 0.1 } }
+ end
+ end
+
+ context 'when report_on_long_redis_durations is enabled' do
+ it 'tracks an exception and continues' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(an_instance_of(described_class::MysteryRedisDurationError),
+ command: 'mget',
+ duration: be > threshold,
+ timestamp: a_string_matching(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{5}/))
+
+ Gitlab::Redis::SharedState.with { |r| r.mget('foo', 'foo') { sleep threshold + 0.1 } }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 48fcc9f93db..85daf50717c 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -83,6 +83,12 @@ RSpec.describe Gitlab::InstrumentationHelper do
expect(payload).to include(:cpu_s)
end
+ it 'logs the process ID' do
+ subject
+
+ expect(payload).to include(:pid)
+ end
+
context 'when logging memory allocations' do
include MemoryInstrumentationHelper
@@ -102,8 +108,6 @@ RSpec.describe Gitlab::InstrumentationHelper do
end
context 'when load balancing is enabled' do
- include_context 'clear DB Load Balancing configuration'
-
before do
allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
end
diff --git a/spec/lib/gitlab/jira_import/issue_serializer_spec.rb b/spec/lib/gitlab/jira_import/issue_serializer_spec.rb
index e57a8457e7c..198d2db234c 100644
--- a/spec/lib/gitlab/jira_import/issue_serializer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issue_serializer_spec.rb
@@ -192,6 +192,19 @@ RSpec.describe Gitlab::JiraImport::IssueSerializer do
expect(subject[:assignee_ids]).to be_nil
end
end
+
+ context 'with jira server response' do
+ let(:assignee) { double(attrs: { 'displayName' => 'Solver', 'key' => '1234' }) }
+
+ context 'when assignee maps to a valid GitLab user' do
+ it 'sets the issue assignees to the mapped user' do
+ expect(Gitlab::JiraImport).to receive(:get_user_mapping).with(project.id, '1234')
+ .and_return(user.id)
+
+ expect(subject[:assignee_ids]).to eq([user.id])
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/lib/gitlab/json_cache_spec.rb
index 8265c3449bb..7899d01b475 100644
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ b/spec/lib/gitlab/json_cache_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe Gitlab::JsonCache do
.with(expanded_key)
.and_return(nil)
- expect(ActiveSupport::JSON).not_to receive(:decode)
+ expect(Gitlab::Json).not_to receive(:parse)
expect(cache.read(key)).to be_nil
end
@@ -140,7 +140,7 @@ RSpec.describe Gitlab::JsonCache do
.with(expanded_key)
.and_return(true)
- expect(ActiveSupport::JSON).to receive(:decode).with("true").and_call_original
+ expect(Gitlab::Json).to receive(:parse).with("true").and_call_original
expect(cache.read(key, BroadcastMessage)).to eq(true)
end
end
@@ -151,7 +151,7 @@ RSpec.describe Gitlab::JsonCache do
.with(expanded_key)
.and_return(false)
- expect(ActiveSupport::JSON).to receive(:decode).with("false").and_call_original
+ expect(Gitlab::Json).to receive(:parse).with("false").and_call_original
expect(cache.read(key, BroadcastMessage)).to eq(false)
end
end
diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb
index 24d2b03fe2a..17d038ed16c 100644
--- a/spec/lib/gitlab/kas_spec.rb
+++ b/spec/lib/gitlab/kas_spec.rb
@@ -65,6 +65,38 @@ RSpec.describe Gitlab::Kas do
end
end
+ describe '.tunnel_url' do
+ before do
+ stub_config(gitlab_kas: { external_url: external_url })
+ end
+
+ subject { described_class.tunnel_url }
+
+ context 'external_url uses wss://' do
+ let(:external_url) { 'wss://kas.gitlab.example.com' }
+
+ it { is_expected.to eq('https://kas.gitlab.example.com/k8s-proxy') }
+ end
+
+ context 'external_url uses ws://' do
+ let(:external_url) { 'ws://kas.gitlab.example.com' }
+
+ it { is_expected.to eq('http://kas.gitlab.example.com/k8s-proxy') }
+ end
+
+ context 'external_url uses grpcs://' do
+ let(:external_url) { 'grpcs://kas.gitlab.example.com' }
+
+ it { is_expected.to eq('https://kas.gitlab.example.com/k8s-proxy') }
+ end
+
+ context 'external_url uses grpc://' do
+ let(:external_url) { 'grpc://kas.gitlab.example.com' }
+
+ it { is_expected.to eq('http://kas.gitlab.example.com/k8s-proxy') }
+ end
+ end
+
describe '.internal_url' do
it 'returns gitlab_kas internal_url config' do
expect(described_class.internal_url).to eq(Gitlab.config.gitlab_kas.internal_url)
diff --git a/spec/lib/gitlab/kubernetes/default_namespace_spec.rb b/spec/lib/gitlab/kubernetes/default_namespace_spec.rb
index 976fe4a0a87..b6816a18baa 100644
--- a/spec/lib/gitlab/kubernetes/default_namespace_spec.rb
+++ b/spec/lib/gitlab/kubernetes/default_namespace_spec.rb
@@ -32,6 +32,14 @@ RSpec.describe Gitlab::Kubernetes::DefaultNamespace do
subject { generator.from_environment_slug(environment.slug) }
+ shared_examples_for 'handles very long project paths' do
+ before do
+ allow(project).to receive(:path).and_return 'x' * 100
+ end
+
+ it { is_expected.to satisfy { |s| s.length <= 63 } }
+ end
+
context 'namespace per environment is enabled' do
context 'platform namespace is specified' do
let(:platform_namespace) { 'platform-namespace' }
@@ -47,15 +55,12 @@ RSpec.describe Gitlab::Kubernetes::DefaultNamespace do
context 'platform namespace is blank' do
let(:platform_namespace) { nil }
- let(:mock_namespace) { 'mock-namespace' }
- it 'constructs a namespace from the project and environment' do
- expect(Gitlab::NamespaceSanitizer).to receive(:sanitize)
- .with("#{project.path}-#{project.id}-#{environment.slug}".downcase)
- .and_return(mock_namespace)
-
- expect(subject).to eq mock_namespace
+ it 'constructs a namespace from the project and environment slug' do
+ expect(subject).to eq "path-with-capitals-#{project.id}-#{environment.slug}"
end
+
+ it_behaves_like 'handles very long project paths'
end
end
@@ -70,15 +75,12 @@ RSpec.describe Gitlab::Kubernetes::DefaultNamespace do
context 'platform namespace is blank' do
let(:platform_namespace) { nil }
- let(:mock_namespace) { 'mock-namespace' }
- it 'constructs a namespace from the project and environment' do
- expect(Gitlab::NamespaceSanitizer).to receive(:sanitize)
- .with("#{project.path}-#{project.id}".downcase)
- .and_return(mock_namespace)
-
- expect(subject).to eq mock_namespace
+ it 'constructs a namespace from just the project' do
+ expect(subject).to eq "path-with-capitals-#{project.id}"
end
+
+ it_behaves_like 'handles very long project paths'
end
end
end
diff --git a/spec/lib/gitlab/kubernetes/kubeconfig/entry/cluster_spec.rb b/spec/lib/gitlab/kubernetes/kubeconfig/entry/cluster_spec.rb
new file mode 100644
index 00000000000..508808be1be
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/kubeconfig/entry/cluster_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Kubernetes::Kubeconfig::Entry::Cluster do
+ describe '#to_h' do
+ let(:name) { 'name' }
+ let(:url) { 'url' }
+
+ subject { described_class.new(name: name, url: url).to_h }
+
+ it { is_expected.to eq({ name: name, cluster: { server: url } }) }
+
+ context 'with a certificate' do
+ let(:cert) { 'certificate' }
+ let(:cert_encoded) { Base64.strict_encode64(cert) }
+
+ subject { described_class.new(name: name, url: url, ca_pem: cert).to_h }
+
+ it { is_expected.to eq({ name: name, cluster: { server: url, 'certificate-authority-data': cert_encoded } }) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/kubeconfig/entry/context_spec.rb b/spec/lib/gitlab/kubernetes/kubeconfig/entry/context_spec.rb
new file mode 100644
index 00000000000..43d4c46fda1
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/kubeconfig/entry/context_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Kubernetes::Kubeconfig::Entry::Context do
+ describe '#to_h' do
+ let(:name) { 'name' }
+ let(:user) { 'user' }
+ let(:cluster) { 'cluster' }
+
+ subject { described_class.new(name: name, user: user, cluster: cluster).to_h }
+
+ it { is_expected.to eq({ name: name, context: { cluster: cluster, user: user } }) }
+
+ context 'with a namespace' do
+ let(:namespace) { 'namespace' }
+
+ subject { described_class.new(name: name, user: user, cluster: cluster, namespace: namespace).to_h }
+
+ it { is_expected.to eq({ name: name, context: { cluster: cluster, user: user, namespace: namespace } }) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/kubeconfig/entry/user_spec.rb b/spec/lib/gitlab/kubernetes/kubeconfig/entry/user_spec.rb
new file mode 100644
index 00000000000..3d6acc80823
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/kubeconfig/entry/user_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Kubernetes::Kubeconfig::Entry::User do
+ describe '#to_h' do
+ let(:name) { 'name' }
+ let(:token) { 'token' }
+
+ subject { described_class.new(name: name, token: token).to_h }
+
+ it { is_expected.to eq({ name: name, user: { token: token } }) }
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb b/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb
new file mode 100644
index 00000000000..057c4373329
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Kubernetes::Kubeconfig::Template do
+ let(:template) { described_class.new }
+
+ describe '#valid?' do
+ subject { template.valid? }
+
+ it { is_expected.to be_falsey }
+
+ context 'with configuration added' do
+ before do
+ template.add_context(name: 'name', cluster: 'cluster', user: 'user')
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#to_h' do
+ subject { described_class.new.to_h }
+
+ it do
+ is_expected.to eq(
+ apiVersion: 'v1',
+ kind: 'Config',
+ clusters: [],
+ users: [],
+ contexts: []
+ )
+ end
+ end
+
+ describe '#to_yaml' do
+ subject { template.to_yaml }
+
+ it { is_expected.to eq(YAML.dump(template.to_h.deep_stringify_keys)) }
+ end
+
+ describe 'adding entries' do
+ let(:entry) { instance_double(entry_class, to_h: attributes) }
+ let(:attributes) do
+ { name: 'name', other: 'other' }
+ end
+
+ subject { template.to_h }
+
+ before do
+ expect(entry_class).to receive(:new).with(attributes).and_return(entry)
+ end
+
+ describe '#add_cluster' do
+ let(:entry_class) { Gitlab::Kubernetes::Kubeconfig::Entry::Cluster }
+
+ before do
+ template.add_cluster(**attributes)
+ end
+
+ it { is_expected.to include(clusters: [attributes]) }
+ end
+
+ describe '#add_user' do
+ let(:entry_class) { Gitlab::Kubernetes::Kubeconfig::Entry::User }
+
+ before do
+ template.add_user(**attributes)
+ end
+
+ it { is_expected.to include(users: [attributes]) }
+ end
+
+ describe '#add_context' do
+ let(:entry_class) { Gitlab::Kubernetes::Kubeconfig::Entry::Context }
+
+ before do
+ template.add_context(**attributes)
+ end
+
+ it { is_expected.to include(contexts: [attributes]) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
index 23dbd4a5bb3..98385cd80cc 100644
--- a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
@@ -178,4 +178,66 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
thing.refresh_markdown_cache!
end
end
+
+ context 'with note' do
+ let(:klass) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'notes'
+ include CacheMarkdownField
+ include Importable
+ include Mentionable
+
+ attr_mentionable :note, pipeline: :note
+ cache_markdown_field :note, pipeline: :note
+ end
+ end
+
+ let(:thing) { klass.new(note: markdown) }
+
+ before do
+ thing.note = "hello world"
+ end
+
+ it 'calls store_mentions!' do
+ expect(thing).to receive(:store_mentions!).and_call_original
+
+ thing.save!
+ end
+
+ context 'during import' do
+ before do
+ thing.importing = true
+ end
+
+ it 'does not call store_mentions!' do
+ expect(thing).not_to receive(:store_mentions!)
+
+ thing.save!
+ end
+ end
+ end
+
+ context 'when persisted cache is newer than current version' do
+ before do
+ thing.update_column(:cached_markdown_version, thing.cached_markdown_version + 1)
+ end
+
+ it 'does not save the generated HTML' do
+ expect(thing).not_to receive(:update_columns)
+
+ thing.refresh_markdown_cache!
+ end
+ end
+
+ context 'when persisted cache is nil' do
+ before do
+ thing.update_column(:cached_markdown_version, nil)
+ end
+
+ it 'does not save the generated HTML' do
+ expect(thing).to receive(:update_columns)
+
+ thing.refresh_markdown_cache!
+ end
+ end
end
diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
index 9572e9f50be..7dda10ab41d 100644
--- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
@@ -18,8 +18,8 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
let(:labels) do
{
class: 'ActiveRecord::Base',
- host: Gitlab::Database.config['host'],
- port: Gitlab::Database.config['port']
+ host: Gitlab::Database.main.config['host'],
+ port: Gitlab::Database.main.config['port']
}
end
diff --git a/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
index 0516091a8ec..08437920e0c 100644
--- a/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActionCable, :request_store do
allow(::Gitlab::Metrics).to receive(:histogram).with(
:action_cable_transmitted_bytes, /transmit/
).and_return(counter)
- message_size = ::ActiveSupport::JSON.encode(data).bytesize
+ message_size = ::Gitlab::Json.generate(data).bytesize
expect(counter).to receive(:observe).with({ channel: channel_class, operation: 'event' }, message_size)
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index 6fc8f090431..3ffbcbea03c 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
let(:env) { {} }
let(:subscriber) { described_class.new }
let(:connection) { ActiveRecord::Base.connection }
+ let(:db_config_name) { ::Gitlab::Database.db_config_name(connection) }
describe '#transaction' do
let(:web_transaction) { double('Gitlab::Metrics::WebTransaction') }
@@ -36,7 +37,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
end
it 'captures the metrics for web only' do
- expect(web_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23)
+ expect(web_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23, db_config_name: db_config_name)
expect(background_transaction).not_to receive(:observe)
expect(background_transaction).not_to receive(:increment)
@@ -56,7 +57,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
end
it 'captures the metrics for web only' do
- expect(web_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23)
+ expect(web_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23, { db_config_name: db_config_name })
expect(background_transaction).not_to receive(:observe)
expect(background_transaction).not_to receive(:increment)
@@ -76,7 +77,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
end
it 'captures the metrics for web only' do
- expect(background_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23)
+ expect(background_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23, db_config_name: db_config_name)
expect(web_transaction).not_to receive(:observe)
expect(web_transaction).not_to receive(:increment)
diff --git a/spec/lib/gitlab/middleware/multipart_spec.rb b/spec/lib/gitlab/middleware/multipart_spec.rb
index 65ec3535271..294a5ee82ed 100644
--- a/spec/lib/gitlab/middleware/multipart_spec.rb
+++ b/spec/lib/gitlab/middleware/multipart_spec.rb
@@ -77,7 +77,8 @@ RSpec.describe Gitlab::Middleware::Multipart do
result = subject
expect(result[0]).to eq(400)
- expect(result[2]).to include('insecure path used')
+ expect(result[2]).to be_a(Array)
+ expect(result[2].first).to include('insecure path used')
end
end
end
diff --git a/spec/lib/gitlab/object_hierarchy_spec.rb b/spec/lib/gitlab/object_hierarchy_spec.rb
index 64161fbafdd..86d09f4601c 100644
--- a/spec/lib/gitlab/object_hierarchy_spec.rb
+++ b/spec/lib/gitlab/object_hierarchy_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Gitlab::ObjectHierarchy do
end
it 'can find ancestors upto a certain level' do
- relation = described_class.new(Group.where(id: child2), options: options).base_and_ancestors(upto: child1)
+ relation = described_class.new(Group.where(id: child2), options: options).base_and_ancestors(upto: child1.id)
expect(relation).to contain_exactly(child2)
end
@@ -143,7 +143,7 @@ RSpec.describe Gitlab::ObjectHierarchy do
end
it 'can find ancestors upto a certain level' do
- relation = described_class.new(Group.where(id: child2), options: options).ancestors(upto: child1)
+ relation = described_class.new(Group.where(id: child2), options: options).ancestors(upto: child1.id)
expect(relation).to be_empty
end
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
index 562a9bf4460..b867dd533e0 100644
--- a/spec/lib/gitlab/pagination/keyset/order_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -6,32 +6,67 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
describe 'paginate over items correctly' do
let(:table) { Arel::Table.new(:my_table) }
let(:order) { nil }
+ let(:default_limit) { 999 }
+ let(:query_building_method) { :build_query }
def run_query(query)
- ActiveRecord::Base.connection.execute(query).to_a
+ ApplicationRecord.connection.execute(query).to_a
end
- def build_query(order:, where_conditions: nil, limit: nil)
+ def where_conditions_as_sql(where_conditions)
+ "WHERE #{Array(where_conditions).map(&:to_sql).join(' OR ')}"
+ end
+
+ def build_query(order:, where_conditions: [], limit: nil)
+ where_string = where_conditions_as_sql(where_conditions)
+
+ <<-SQL
+ SELECT id, year, month
+ FROM (#{table_data}) my_table (id, year, month)
+ #{where_string if where_conditions.present?}
+ ORDER BY #{order}
+ LIMIT #{limit || default_limit};
+ SQL
+ end
+
+ def build_union_query(order:, where_conditions: [], limit: nil)
+ return build_query(order: order, where_conditions: where_conditions, limit: limit) if where_conditions.blank?
+
+ union_queries = Array(where_conditions).map do |where_condition|
+ <<-SQL
+ (SELECT id, year, month
+ FROM (#{table_data}) my_table (id, year, month)
+ WHERE #{where_condition.to_sql}
+ ORDER BY #{order}
+ LIMIT #{limit || default_limit})
+ SQL
+ end
+
+ union_query = union_queries.join(" UNION ALL ")
+
<<-SQL
- SELECT id, year, month
- FROM (#{table_data}) my_table (id, year, month)
- WHERE #{where_conditions || '1=1'}
- ORDER BY #{order}
- LIMIT #{limit || 999};
+ SELECT id, year, month
+ FROM (#{union_query}) as my_table
+ ORDER BY #{order}
+ LIMIT #{limit || default_limit};
SQL
end
+ def cursor_attributes_for_node(node)
+ order.cursor_attributes_for_node(node)
+ end
+
def iterate_and_collect(order:, page_size:, where_conditions: nil)
all_items = []
loop do
- paginated_items = run_query(build_query(order: order, where_conditions: where_conditions, limit: page_size))
+ paginated_items = run_query(send(query_building_method, order: order, where_conditions: where_conditions, limit: page_size))
break if paginated_items.empty?
all_items.concat(paginated_items)
last_item = paginated_items.last
- cursor_attributes = order.cursor_attributes_for_node(last_item)
- where_conditions = order.where_values_with_or_query(cursor_attributes).to_sql
+ cursor_attributes = cursor_attributes_for_node(last_item)
+ where_conditions = order.build_where_values(cursor_attributes)
end
all_items
@@ -54,15 +89,41 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
it { expect(subject).to eq(expected) }
end
+
+ context 'when using the conditions in an UNION query' do
+ let(:query_building_method) { :build_union_query }
+
+ it { expect(subject).to eq(expected) }
+ end
+
+ context 'when the cursor attributes are SQL literals' do
+ def cursor_attributes_for_node(node)
+ # Simulate the scenario where the cursor attributes are SQL literals
+ order.cursor_attributes_for_node(node).transform_values.each_with_index do |value, i|
+ index = i + 1
+ value_sql = value.nil? ? 'NULL::integer' : value
+ values = [value_sql] * index
+ Arel.sql("(ARRAY[#{values.join(',')}])[#{index}]") # example: ARRAY[cursor_value][1] will return cursor_value
+ end
+ end
+
+ it { expect(subject).to eq(expected) }
+
+ context 'when using the conditions in an UNION query' do
+ let(:query_building_method) { :build_union_query }
+
+ it { expect(subject).to eq(expected) }
+ end
+ end
end
context 'when paginating backwards' do
subject do
last_item = expected.last
cursor_attributes = order.cursor_attributes_for_node(last_item)
- where_conditions = order.reversed_order.where_values_with_or_query(cursor_attributes)
+ where_conditions = order.reversed_order.build_where_values(cursor_attributes)
- iterate_and_collect(order: order.reversed_order, page_size: 2, where_conditions: where_conditions.to_sql)
+ iterate_and_collect(order: order.reversed_order, page_size: 2, where_conditions: where_conditions)
end
it do
@@ -371,7 +432,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
reversed = order.reversed_order
before_conditions = reversed.where_values_with_or_query(before_cursor)
- query = build_query(order: order, where_conditions: "(#{after_conditions.to_sql}) AND (#{before_conditions.to_sql})", limit: 100)
+ query = build_query(order: order, where_conditions: [Arel::Nodes::And.new([after_conditions, before_conditions])], limit: 100)
expect(run_query(query)).to eq([
{ "id" => 2, "year" => 2011, "month" => 0 },
diff --git a/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb b/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb
index 1ab8e22d6d1..5ccde789a2e 100644
--- a/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb
+++ b/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::QueryLimiting::ActiveSupportSubscriber do
context 'when the query is actually a rails cache hit' do
it 'does not increment the number of executed SQL queries' do
- ActiveRecord::Base.connection.cache do
+ User.connection.cache do
User.count
User.count
end
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index 4dcf9dc2c05..c93fd884347 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -94,12 +94,6 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
expect(cache.read(:foo)).to be_empty
end
-
- it 'expires the old key format' do
- expect_any_instance_of(Redis).to receive(:unlink).with(cache.cache_key(:foo), cache.old_cache_key(:foo)) # rubocop:disable RSpec/AnyInstanceOf
-
- subject
- end
end
context 'multiple keys' do
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index 2974893ec4a..b8972f28889 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -33,6 +33,10 @@ RSpec.describe Gitlab::SearchResults do
expect(results.objects('projects', page: 1, per_page: 1, without_count: false)).not_to be_kind_of(Kaminari::PaginatableWithoutCount)
end
+ it 'returns without counts collection when requested' do
+ expect(results.objects('projects', page: 1, per_page: 1, without_count: true)).to be_kind_of(Kaminari::PaginatableWithoutCount)
+ end
+
it 'uses page and per_page to paginate results' do
project2 = create(:project, name: 'foo')
diff --git a/spec/lib/gitlab/setup_helper/workhorse_spec.rb b/spec/lib/gitlab/setup_helper/workhorse_spec.rb
index aa9b4595799..18cb266bf4e 100644
--- a/spec/lib/gitlab/setup_helper/workhorse_spec.rb
+++ b/spec/lib/gitlab/setup_helper/workhorse_spec.rb
@@ -22,4 +22,28 @@ RSpec.describe Gitlab::SetupHelper::Workhorse do
end
end
end
+
+ describe '.redis_url' do
+ it 'matches the SharedState URL' do
+ expect(Gitlab::Redis::SharedState).to receive(:url).and_return('foo')
+
+ expect(described_class.redis_url).to eq('foo')
+ end
+ end
+
+ describe '.redis_db' do
+ subject { described_class.redis_db }
+
+ it 'matches the SharedState DB' do
+ expect(Gitlab::Redis::SharedState).to receive(:params).and_return(db: 1)
+
+ is_expected.to eq(1)
+ end
+
+ it 'defaults to 0 if unspecified' do
+ expect(Gitlab::Redis::SharedState).to receive(:params).and_return({})
+
+ is_expected.to eq(0)
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
index 5347680b253..3dd5ac8ee6c 100644
--- a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
+++ b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do
end
end
- context '-timeout flag' do
+ context 'with --timeout flag' do
it 'when given', 'starts Sidekiq workers with given timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options.merge(timeout: 10))
@@ -97,6 +97,27 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do
end
end
+ context 'with --list-queues flag' do
+ it 'errors when given --list-queues and --dryrun' do
+ expect { cli.run(%w(foo --list-queues --dryrun)) }.to raise_error(described_class::CommandError)
+ end
+
+ it 'prints out a list of queues in alphabetical order' do
+ expected_queues = [
+ 'epics:epics_update_epics_dates',
+ 'epics_new_epic_issue',
+ 'new_epic',
+ 'todos_destroyer:todos_destroyer_confidential_epic'
+ ]
+
+ allow(Gitlab::SidekiqConfig::CliMethods).to receive(:query_queues).and_return(expected_queues.shuffle)
+
+ expect(cli).to receive(:puts).with([expected_queues])
+
+ cli.run(%w(--queue-selector feature_category=epics --list-queues))
+ end
+ end
+
context 'queue namespace expansion' do
it 'starts Sidekiq workers for all queues in all_queues.yml with a namespace in argv' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['cronjob:foo', 'cronjob:bar'])
diff --git a/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
index 687e35813b1..4a8dbe69d36 100644
--- a/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
@@ -114,6 +114,13 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerRouter do
['resource_boundary=cpu', 'queue_b'],
['tags=expensive', 'queue_c']
] | 'queue_foo'
+ # Match by generated queue name
+ [
+ ['name=foo_bar', 'queue_foo'],
+ ['feature_category=feature_a|urgency=low', 'queue_a'],
+ ['resource_boundary=cpu', 'queue_b'],
+ ['tags=expensive', 'queue_c']
+ ] | 'queue_foo'
end
end
diff --git a/spec/lib/gitlab/sidekiq_config/worker_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
index 0c43c33ff8c..f4d7a4b3359 100644
--- a/spec/lib/gitlab/sidekiq_config/worker_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do
namespace = queue.include?(':') && queue.split(':').first
inner_worker = double(
name: attributes[:worker_name] || 'Foo::BarWorker',
- queue: queue,
+ generated_queue_name: queue,
queue_namespace: namespace,
get_feature_category: attributes[:feature_category],
get_weight: attributes[:weight],
@@ -48,9 +48,9 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do
describe 'delegations' do
[
- :feature_category_not_owned?, :get_feature_category, :get_weight,
- :get_worker_resource_boundary, :get_urgency, :queue,
- :queue_namespace, :worker_has_external_dependencies?
+ :feature_category_not_owned?, :generated_queue_name,
+ :get_feature_category, :get_weight, :get_worker_resource_boundary,
+ :get_urgency, :queue_namespace, :worker_has_external_dependencies?
].each do |meth|
it "delegates #{meth} to the worker class" do
worker = double
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index d2a53185acd..da135f202f6 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::SidekiqConfig do
describe '.workers_for_all_queues_yml' do
it 'returns a tuple with FOSS workers first' do
expect(described_class.workers_for_all_queues_yml.first)
- .to include(an_object_having_attributes(queue: 'post_receive'))
+ .to include(an_object_having_attributes(generated_queue_name: 'post_receive'))
end
end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 4406b34e638..a98038cd3f8 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -228,8 +228,6 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
context 'when the job performs database queries' do
- include_context 'clear DB Load Balancing configuration'
-
before do
allow(Time).to receive(:now).and_return(timestamp)
allow(Process).to receive(:clock_gettime).and_call_original
@@ -256,7 +254,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered
call_subject(job, 'test_queue') do
- ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
+ ApplicationRecord.connection.execute('SELECT pg_sleep(0.1);')
end
end
@@ -267,7 +265,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(logger).to receive(:info).with(expected_end_payload).ordered
call_subject(job.dup, 'test_queue') do
- ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
+ ApplicationRecord.connection.execute('SELECT pg_sleep(0.1);')
end
Gitlab::SafeRequestStore.clear!
@@ -293,54 +291,41 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
include_examples 'performs database queries'
end
- context 'when load balancing is enabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- end
+ context 'when load balancing is enabled', :db_load_balancing do
+ let(:db_config_name) { ::Gitlab::Database.db_config_name(ApplicationRecord.connection) }
- let(:dbname) { ::Gitlab::Database.dbname(ActiveRecord::Base.connection) }
+ let(:expected_db_payload_defaults) do
+ metrics =
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_counter_keys +
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_duration_keys +
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_keys +
+ [:db_duration_s]
+
+ metrics.each_with_object({}) do |key, result|
+ result[key.to_s] = 0
+ end
+ end
let(:expected_end_payload_with_db) do
- expected_end_payload.merge(
+ expected_end_payload.merge(expected_db_payload_defaults).merge(
'db_duration_s' => a_value >= 0.1,
'db_count' => a_value >= 1,
- 'db_cached_count' => 0,
- 'db_write_count' => 0,
- 'db_replica_count' => 0,
- 'db_replica_cached_count' => 0,
- 'db_replica_wal_count' => 0,
+ "db_replica_#{db_config_name}_count" => 0,
'db_replica_duration_s' => a_value >= 0,
'db_primary_count' => a_value >= 1,
- 'db_primary_cached_count' => 0,
- 'db_primary_wal_count' => 0,
+ "db_primary_#{db_config_name}_count" => a_value >= 1,
'db_primary_duration_s' => a_value > 0,
- "db_primary_#{dbname}_duration_s" => a_value > 0,
- 'db_primary_wal_cached_count' => 0,
- 'db_replica_wal_cached_count' => 0
+ "db_primary_#{db_config_name}_duration_s" => a_value > 0
)
end
let(:end_payload) do
- start_payload.merge(
+ start_payload.merge(expected_db_payload_defaults).merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
'job_status' => 'done',
'duration_s' => 0.0,
'completed_at' => timestamp.to_f,
- 'cpu_s' => 1.111112,
- 'db_duration_s' => 0.0,
- 'db_cached_count' => 0,
- 'db_count' => 0,
- 'db_write_count' => 0,
- 'db_replica_count' => 0,
- 'db_replica_cached_count' => 0,
- 'db_replica_wal_count' => 0,
- 'db_replica_duration_s' => 0,
- 'db_primary_count' => 0,
- 'db_primary_cached_count' => 0,
- 'db_primary_wal_count' => 0,
- 'db_primary_wal_cached_count' => 0,
- 'db_replica_wal_cached_count' => 0,
- 'db_primary_duration_s' => 0
+ 'cpu_s' => 1.111112
)
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 3ec8d404bf0..cae0bb6b167 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -236,7 +236,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
include_context 'server metrics with mocked prometheus'
include_context 'server metrics call'
- include_context 'clear DB Load Balancing configuration'
shared_context 'worker declaring data consistency' do
let(:worker_class) { LBTestWorker }
diff --git a/spec/lib/gitlab/sql/cte_spec.rb b/spec/lib/gitlab/sql/cte_spec.rb
index 4cf94f4dcab..18ae2cb065f 100644
--- a/spec/lib/gitlab/sql/cte_spec.rb
+++ b/spec/lib/gitlab/sql/cte_spec.rb
@@ -8,9 +8,9 @@ RSpec.describe Gitlab::SQL::CTE do
relation = User.where(id: 1)
cte = described_class.new(:cte_name, relation)
sql = cte.to_arel.to_sql
- name = ActiveRecord::Base.connection.quote_table_name(:cte_name)
+ name = ApplicationRecord.connection.quote_table_name(:cte_name)
- sql1 = ActiveRecord::Base.connection.unprepared_statement do
+ sql1 = ApplicationRecord.connection.unprepared_statement do
relation.except(:order).to_sql
end
@@ -30,8 +30,8 @@ RSpec.describe Gitlab::SQL::CTE do
cte = described_class.new(:cte_name, nil)
table = Arel::Table.new(:kittens)
- source_name = ActiveRecord::Base.connection.quote_table_name(:cte_name)
- alias_name = ActiveRecord::Base.connection.quote_table_name(:kittens)
+ source_name = ApplicationRecord.connection.quote_table_name(:cte_name)
+ alias_name = ApplicationRecord.connection.quote_table_name(:kittens)
expect(cte.alias_to(table).to_sql).to eq("#{source_name} AS #{alias_name}")
end
diff --git a/spec/lib/gitlab/sql/glob_spec.rb b/spec/lib/gitlab/sql/glob_spec.rb
index 8e2b842add6..bb3ca0d3f5b 100644
--- a/spec/lib/gitlab/sql/glob_spec.rb
+++ b/spec/lib/gitlab/sql/glob_spec.rb
@@ -46,10 +46,10 @@ RSpec.describe Gitlab::SQL::Glob do
end
def query(sql)
- ActiveRecord::Base.connection.select_all(sql)
+ ApplicationRecord.connection.select_all(sql)
end
def quote(string)
- ActiveRecord::Base.connection.quote(string)
+ ApplicationRecord.connection.quote(string)
end
end
diff --git a/spec/lib/gitlab/sql/recursive_cte_spec.rb b/spec/lib/gitlab/sql/recursive_cte_spec.rb
index edcacd404c2..f78c4a0cc02 100644
--- a/spec/lib/gitlab/sql/recursive_cte_spec.rb
+++ b/spec/lib/gitlab/sql/recursive_cte_spec.rb
@@ -14,9 +14,9 @@ RSpec.describe Gitlab::SQL::RecursiveCTE do
cte << rel2
sql = cte.to_arel.to_sql
- name = ActiveRecord::Base.connection.quote_table_name(:cte_name)
+ name = ApplicationRecord.connection.quote_table_name(:cte_name)
- sql1, sql2 = ActiveRecord::Base.connection.unprepared_statement do
+ sql1, sql2 = ApplicationRecord.connection.unprepared_statement do
[rel1.except(:order).to_sql, rel2.except(:order).to_sql]
end
@@ -28,8 +28,8 @@ RSpec.describe Gitlab::SQL::RecursiveCTE do
it 'returns an alias for the CTE' do
table = Arel::Table.new(:kittens)
- source_name = ActiveRecord::Base.connection.quote_table_name(:cte_name)
- alias_name = ActiveRecord::Base.connection.quote_table_name(:kittens)
+ source_name = ApplicationRecord.connection.quote_table_name(:cte_name)
+ alias_name = ApplicationRecord.connection.quote_table_name(:kittens)
expect(cte.alias_to(table).to_sql).to eq("#{source_name} AS #{alias_name}")
end
@@ -37,8 +37,8 @@ RSpec.describe Gitlab::SQL::RecursiveCTE do
it 'replaces dots with an underscore' do
table = Arel::Table.new('gitlab.kittens')
- source_name = ActiveRecord::Base.connection.quote_table_name(:cte_name)
- alias_name = ActiveRecord::Base.connection.quote_table_name(:gitlab_kittens)
+ source_name = ApplicationRecord.connection.quote_table_name(:cte_name)
+ alias_name = ApplicationRecord.connection.quote_table_name(:gitlab_kittens)
expect(cte.alias_to(table).to_sql).to eq("#{source_name} AS #{alias_name}")
end
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
index ed551521b1d..628eb380396 100644
--- a/spec/lib/gitlab/subscription_portal_spec.rb
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::SubscriptionPortal, skip: Gitlab.jh? do
+RSpec.describe ::Gitlab::SubscriptionPortal do
using RSpec::Parameterized::TableSyntax
where(:method_name, :test, :development, :result) do
diff --git a/spec/lib/gitlab/usage/docs/helper_spec.rb b/spec/lib/gitlab/usage/docs/helper_spec.rb
deleted file mode 100644
index e2bb1d8d818..00000000000
--- a/spec/lib/gitlab/usage/docs/helper_spec.rb
+++ /dev/null
@@ -1,79 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Docs::Helper do
- subject(:helper) { klass.new }
-
- let_it_be(:klass) do
- Class.new do
- include Gitlab::Usage::Docs::Helper
- end
- end
-
- let(:metric_definition) do
- {
- data_category: 'Standard',
- name: 'test_metric',
- description: description,
- product_group: 'group::product intelligence',
- status: 'data_available',
- tier: %w(free premium)
- }
- end
-
- let(:description) { 'Metric description' }
-
- describe '#render_name' do
- it { expect(helper.render_name(metric_definition[:name])).to eq('### `test_metric`') }
- end
-
- describe '#render_description' do
- context 'without description' do
- let(:description) { nil }
-
- it { expect(helper.render_description(metric_definition)).to eq('Missing description') }
- end
-
- context 'without description' do
- it { expect(helper.render_description(metric_definition)).to eq('Metric description') }
- end
- end
-
- describe '#render_yaml_link' do
- let(:yaml_link) { 'config/metrics/license/test_metric.yml' }
- let(:expected) { "[YAML definition](#{yaml_link})" }
-
- it { expect(helper.render_yaml_link(yaml_link)).to eq(expected) }
- end
-
- describe '#render_status' do
- let(:expected) { "Status: `data_available`" }
-
- it { expect(helper.render_status(metric_definition)).to eq(expected) }
- end
-
- describe '#render_owner' do
- let(:expected) { "Group: `group::product intelligence`" }
-
- it { expect(helper.render_owner(metric_definition)).to eq(expected) }
- end
-
- describe '#render_tiers' do
- let(:expected) { "Tiers: `free`, `premium`" }
-
- it { expect(helper.render_tiers(metric_definition)).to eq(expected) }
- end
-
- describe '#render_data_category' do
- let(:expected) { 'Data Category: `Standard`' }
-
- it { expect(helper.render_data_category(metric_definition)).to eq(expected) }
- end
-
- describe '#render_owner' do
- let(:expected) { "Group: `group::product intelligence`" }
-
- it { expect(helper.render_owner(metric_definition)).to eq(expected) }
- end
-end
diff --git a/spec/lib/gitlab/usage/docs/renderer_spec.rb b/spec/lib/gitlab/usage/docs/renderer_spec.rb
deleted file mode 100644
index f3b83a4a4b3..00000000000
--- a/spec/lib/gitlab/usage/docs/renderer_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-CODE_REGEX = %r{<code>(.*)</code>}.freeze
-
-RSpec.describe Gitlab::Usage::Docs::Renderer do
- describe 'contents' do
- let(:dictionary_path) { Gitlab::Usage::Docs::Renderer::DICTIONARY_PATH }
- let(:items) { Gitlab::Usage::MetricDefinition.definitions.first(10).to_h }
-
- it 'generates dictionary for given items' do
- generated_dictionary = described_class.new(items).contents
-
- generated_dictionary_keys = RDoc::Markdown
- .parse(generated_dictionary)
- .table_of_contents
- .select { |metric_doc| metric_doc.level == 3 }
- .map { |item| item.text.match(CODE_REGEX)&.captures&.first }
-
- expect(generated_dictionary_keys).to match_array(items.keys)
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/docs/value_formatter_spec.rb b/spec/lib/gitlab/usage/docs/value_formatter_spec.rb
deleted file mode 100644
index f21656df894..00000000000
--- a/spec/lib/gitlab/usage/docs/value_formatter_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Docs::ValueFormatter do
- describe '.format' do
- using RSpec::Parameterized::TableSyntax
- where(:key, :value, :expected_value) do
- :product_group | 'growth::product intelligence' | '`growth::product intelligence`'
- :data_source | 'redis' | 'Redis'
- :data_source | 'ruby' | 'Ruby'
- :introduced_by_url | 'http://test.com' | '[Introduced by](http://test.com)'
- :tier | %w(gold premium) | ' `gold`, `premium`'
- :distribution | %w(ce ee) | ' `ce`, `ee`'
- :key_path | 'key.path' | '**`key.path`**'
- :milestone | '13.4' | '13.4'
- :status | 'data_available' | '`data_available`'
- end
-
- with_them do
- subject { described_class.format(key, value) }
-
- it { is_expected.to eq(expected_value) }
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index f3c3e5fc550..1ae8a0881ef 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
distribution: %w(ee ce),
tier: %w(free starter premium ultimate bronze silver gold),
name: 'uuid',
- data_category: 'Standard'
+ data_category: 'standard'
}
end
@@ -87,14 +87,14 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
end
it 'raise exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
described_class.new(path, attributes).validate!
end
context 'with skip_validation' do
it 'raise exception if skip_validation: false' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
described_class.new(path, attributes.merge( { skip_validation: false } )).validate!
end
@@ -113,7 +113,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
attributes[:status] = 'broken'
attributes.delete(:repair_issue_url)
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
described_class.new(path, attributes).validate!
end
@@ -173,7 +173,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
write_metric(metric1, path, yaml_content)
write_metric(metric2, path, yaml_content)
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
subject
end
@@ -199,7 +199,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
data_source: 'database',
distribution: %w(ee ce),
tier: %w(free starter premium ultimate bronze silver gold),
- data_category: 'Optional'
+ data_category: 'optional'
}
end
diff --git a/spec/lib/gitlab/usage/metric_spec.rb b/spec/lib/gitlab/usage/metric_spec.rb
index d4a789419a4..d83f59e4a7d 100644
--- a/spec/lib/gitlab/usage/metric_spec.rb
+++ b/spec/lib/gitlab/usage/metric_spec.rb
@@ -3,27 +3,46 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metric do
- describe '#definition' do
- it 'returns key_path metric definiton' do
- expect(described_class.new(key_path: 'uuid').definition).to be_an(Gitlab::Usage::MetricDefinition)
- end
+ let!(:issue) { create(:issue) }
+
+ let(:attributes) do
+ {
+ data_category: "Operational",
+ key_path: "counts.issues",
+ description: "Count of Issues created",
+ product_section: "dev",
+ product_stage: "plan",
+ product_group: "group::plan",
+ product_category: "issue_tracking",
+ value_type: "number",
+ status: "data_available",
+ time_frame: "all",
+ data_source: "database",
+ instrumentation_class: "CountIssuesMetric",
+ distribution: %w(ce ee),
+ tier: %w(free premium ultimate)
+ }
end
- describe '#unflatten_default_path' do
- using RSpec::Parameterized::TableSyntax
+ let(:issue_count_metric_definiton) do
+ double(:issue_count_metric_definiton,
+ attributes.merge({ attributes: attributes })
+ )
+ end
- where(:key_path, :value, :expected_hash) do
- 'uuid' | nil | { uuid: nil }
- 'uuid' | '1111' | { uuid: '1111' }
- 'counts.issues' | nil | { counts: { issues: nil } }
- 'counts.issues' | 100 | { counts: { issues: 100 } }
- 'usage_activity_by_stage.verify.ci_builds' | 100 | { usage_activity_by_stage: { verify: { ci_builds: 100 } } }
- end
+ before do
+ allow(ApplicationRecord.connection).to receive(:transaction_open?).and_return(false)
+ end
- with_them do
- subject { described_class.new(key_path: key_path, value: value).unflatten_key_path }
+ describe '#with_value' do
+ it 'returns key_path metric with the corresponding value' do
+ expect(described_class.new(issue_count_metric_definiton).with_value).to eq({ counts: { issues: 1 } })
+ end
+ end
- it { is_expected.to eq(expected_hash) }
+ describe '#with_instrumentation' do
+ it 'returns key_path metric with the corresponding generated query' do
+ expect(described_class.new(issue_count_metric_definiton).with_instrumentation).to eq({ counts: { issues: "SELECT COUNT(\"issues\".\"id\") FROM \"issues\"" } })
end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb
index 8f52d550e5c..1b2170baf17 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CollectedDataCategoriesMetric do
it_behaves_like 'a correct instrumented metric value', {} do
- let(:expected_value) { %w[Standard Subscription Operational Optional] }
+ let(:expected_value) { %w[standard subscription operational optional] }
before do
allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance|
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
index 5e36820df5e..0a32bdb95d3 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
@@ -4,11 +4,11 @@ require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
subject do
- described_class.tap do |m|
- m.relation { Issue }
- m.operation :count
- m.start { m.relation.minimum(:id) }
- m.finish { m.relation.maximum(:id) }
+ described_class.tap do |metric_class|
+ metric_class.relation { Issue }
+ metric_class.operation :count
+ metric_class.start { metric_class.relation.minimum(:id) }
+ metric_class.finish { metric_class.relation.maximum(:id) }
end.new(time_frame: 'all')
end
@@ -38,9 +38,9 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
context 'with start and finish not called' do
subject do
- described_class.tap do |m|
- m.relation { Issue }
- m.operation :count
+ described_class.tap do |metric_class|
+ metric_class.relation { Issue }
+ metric_class.operation :count
end.new(time_frame: 'all')
end
@@ -51,12 +51,12 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
context 'with cache_start_and_finish_as called' do
subject do
- described_class.tap do |m|
- m.relation { Issue }
- m.operation :count
- m.start { m.relation.minimum(:id) }
- m.finish { m.relation.maximum(:id) }
- m.cache_start_and_finish_as :special_issue_count
+ described_class.tap do |metric_class|
+ metric_class.relation { Issue }
+ metric_class.operation :count
+ metric_class.start { metric_class.relation.minimum(:id) }
+ metric_class.finish { metric_class.relation.maximum(:id) }
+ metric_class.cache_start_and_finish_as :special_issue_count
end.new(time_frame: 'all')
end
@@ -71,5 +71,45 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
expect(Rails.cache.read('metric_instrumentation/special_issue_count_maximum_id')).to eq(issues.max_by(&:id).id)
end
end
+
+ context 'with estimate_batch_distinct_count' do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.relation { Issue }
+ metric_class.operation(:estimate_batch_distinct_count)
+ metric_class.start { metric_class.relation.minimum(:id) }
+ metric_class.finish { metric_class.relation.maximum(:id) }
+ end.new(time_frame: 'all')
+ end
+
+ it 'calculates a correct result' do
+ expect(subject.value).to be_within(Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE).percent_of(3)
+ end
+
+ context 'with block passed to operation' do
+ let(:buckets) { double('Buckets').as_null_object }
+
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.relation { Issue }
+ metric_class.operation(:estimate_batch_distinct_count) do |result|
+ result.foo
+ end
+ metric_class.start { metric_class.relation.minimum(:id) }
+ metric_class.finish { metric_class.relation.maximum(:id) }
+ end.new(time_frame: 'all')
+ end
+
+ before do
+ allow(Gitlab::Database::PostgresHll::Buckets).to receive(:new).and_return(buckets)
+ end
+
+ it 'calls the block passing HLL buckets as an argument' do
+ expect(buckets).to receive(:foo)
+
+ subject.value
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb
new file mode 100644
index 00000000000..158be34d39c
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GenericMetric do
+ shared_examples 'custom fallback' do |custom_fallback|
+ subject do
+ Class.new(described_class) do
+ fallback(custom_fallback)
+ value { Gitlab::Database.main.version }
+ end.new(time_frame: 'none')
+ end
+
+ describe '#value' do
+ it 'gives the correct value' do
+ expect(subject.value).to eq(Gitlab::Database.main.version)
+ end
+
+ context 'when raising an exception' do
+ it 'return the custom fallback' do
+ expect(Gitlab::Database.main).to receive(:version).and_raise('Error')
+ expect(subject.value).to eq(custom_fallback)
+ end
+ end
+ end
+ end
+
+ context 'with default fallback' do
+ subject do
+ Class.new(described_class) do
+ value { Gitlab::Database.main.version }
+ end.new(time_frame: 'none')
+ end
+
+ describe '#value' do
+ it 'gives the correct value' do
+ expect(subject.value).to eq(Gitlab::Database.main.version)
+ end
+
+ context 'when raising an exception' do
+ it 'return the default fallback' do
+ expect(Gitlab::Database.main).to receive(:version).and_raise('Error')
+ expect(subject.value).to eq(described_class::FALLBACK)
+ end
+ end
+ end
+ end
+
+ context 'with custom fallback -2' do
+ it_behaves_like 'custom fallback', -2
+ end
+
+ context 'with custom fallback nil' do
+ it_behaves_like 'custom fallback', nil
+ end
+
+ context 'with custom fallback false' do
+ it_behaves_like 'custom fallback', false
+ end
+
+ context 'with custom fallback true' do
+ it_behaves_like 'custom fallback', true
+ end
+
+ context 'with custom fallback []' do
+ it_behaves_like 'custom fallback', []
+ end
+
+ context 'with custom fallback { major: -1 }' do
+ it_behaves_like 'custom fallback', { major: -1 }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/redis_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/redis_metric_spec.rb
new file mode 100644
index 00000000000..fb3bd1ba834
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/redis_metric_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::RedisMetric, :clean_gitlab_redis_shared_state do
+ before do
+ 4.times do
+ Gitlab::UsageDataCounters::SourceCodeCounter.count(:pushes)
+ end
+ end
+
+ let(:expected_value) { 4 }
+
+ it_behaves_like 'a correct instrumented metric value', { options: { event: 'pushes', counter_class: 'SourceCodeCounter' } }
+
+ it 'raises an exception if event option is not present' do
+ expect { described_class.new(counter_class: 'SourceCodeCounter') }.to raise_error(ArgumentError)
+ end
+
+ it 'raises an exception if counter_class option is not present' do
+ expect { described_class.new(event: 'pushes') }.to raise_error(ArgumentError)
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
index b4ab9d4861b..0f95da74ff9 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -16,6 +16,14 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
end
end
+ describe '#add_metric' do
+ let(:metric) { 'CountIssuesMetric' }
+
+ it 'computes the suggested name for given metric' do
+ expect(described_class.add_metric(metric)).to eq('count_issues')
+ end
+ end
+
context 'for count with default column metrics' do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with count(Board)
diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
index b1d5d106082..d4148b57348 100644
--- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do
Gitlab::Ci::Pipeline::Chain::Config::Content::AutoDevops.new(pipeline, command).content,
project: project,
user: double,
- sha: double
+ sha: 'd310cc759caaa20cd05a9e0983d6017896d9c34c'
).execute
config_source = :auto_devops_source
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index d89202ae7fe..887759014f5 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -143,7 +143,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
context 'when usage_ping is disabled' do
it 'does not track the event' do
- stub_application_setting(usage_ping_enabled: false)
+ allow(::ServicePing::ServicePingSettings).to receive(:enabled?).and_return(false)
described_class.track_event(weekly_event, values: entity1, time: Date.current)
@@ -153,7 +153,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
context 'when usage_ping is enabled' do
before do
- stub_application_setting(usage_ping_enabled: true)
+ allow(::ServicePing::ServicePingSettings).to receive(:enabled?).and_return(true)
end
it 'tracks event when using symbol' do
diff --git a/spec/lib/gitlab/usage_data_counters/redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/redis_counter_spec.rb
index d4f6110b3df..753e09731bf 100644
--- a/spec/lib/gitlab/usage_data_counters/redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/redis_counter_spec.rb
@@ -8,12 +8,12 @@ RSpec.describe Gitlab::UsageDataCounters::RedisCounter, :clean_gitlab_redis_shar
subject { Class.new.extend(described_class) }
before do
- stub_application_setting(usage_ping_enabled: setting_value)
+ allow(::ServicePing::ServicePingSettings).to receive(:enabled?).and_return(service_ping_enabled)
end
describe '.increment' do
context 'when usage_ping is disabled' do
- let(:setting_value) { false }
+ let(:service_ping_enabled) { false }
it 'counter is not increased' do
expect do
@@ -23,7 +23,7 @@ RSpec.describe Gitlab::UsageDataCounters::RedisCounter, :clean_gitlab_redis_shar
end
context 'when usage_ping is enabled' do
- let(:setting_value) { true }
+ let(:service_ping_enabled) { true }
it 'counter is increased' do
expect do
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::UsageDataCounters::RedisCounter, :clean_gitlab_redis_shar
describe '.increment_by' do
context 'when usage_ping is disabled' do
- let(:setting_value) { false }
+ let(:service_ping_enabled) { false }
it 'counter is not increased' do
expect do
@@ -45,7 +45,7 @@ RSpec.describe Gitlab::UsageDataCounters::RedisCounter, :clean_gitlab_redis_shar
end
context 'when usage_ping is enabled' do
- let(:setting_value) { true }
+ let(:service_ping_enabled) { true }
it 'counter is increased' do
expect do
diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb
index 18acd767c6d..e0063194f9b 100644
--- a/spec/lib/gitlab/usage_data_metrics_spec.rb
+++ b/spec/lib/gitlab/usage_data_metrics_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::UsageDataMetrics do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
end
- context 'whith instrumentation_class' do
+ context 'with instrumentation_class' do
it 'includes top level keys' do
expect(subject).to include(:uuid)
expect(subject).to include(:hostname)
@@ -26,11 +26,6 @@ RSpec.describe Gitlab::UsageDataMetrics do
expect(subject[:counts]).to include(:boards)
end
- it 'includes i_quickactions_approve monthly and weekly key' do
- expect(subject[:redis_hll_counters][:quickactions]).to include(:i_quickactions_approve_monthly)
- expect(subject[:redis_hll_counters][:quickactions]).to include(:i_quickactions_approve_weekly)
- end
-
it 'includes counts keys' do
expect(subject[:counts]).to include(:issues)
end
@@ -42,6 +37,43 @@ RSpec.describe Gitlab::UsageDataMetrics do
it 'includes usage_activity_by_stage_monthly keys' do
expect(subject[:usage_activity_by_stage_monthly][:plan]).to include(:issues)
end
+
+ it 'includes settings keys' do
+ expect(subject[:settings]).to include(:collected_data_categories)
+ end
+
+ describe 'Redis_HLL_counters' do
+ let(:metric_files_key_paths) do
+ Gitlab::Usage::MetricDefinition
+ .definitions
+ .select { |k, v| v.attributes[:data_source] == 'redis_hll' && v.key_path.starts_with?('redis_hll_counters') }
+ .keys
+ .sort
+ end
+
+ # Recursively traverse nested Hash of a generated Service Ping to return an Array of key paths
+ # in the dotted format used in metric definition YAML files, e.g.: 'count.category.metric_name'
+ def parse_service_ping_keys(object, key_path = [])
+ if object.is_a?(Hash)
+ object.each_with_object([]) do |(key, value), result|
+ result.append parse_service_ping_keys(value, key_path + [key])
+ end
+ else
+ key_path.join('.')
+ end
+ end
+
+ let(:service_ping_key_paths) do
+ parse_service_ping_keys(subject)
+ .flatten
+ .select { |k| k.starts_with?('redis_hll_counters') }
+ .sort
+ end
+
+ it 'is included in the Service Ping hash structure' do
+ expect(metric_files_key_paths).to match_array(service_ping_key_paths)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/usage_data_non_sql_metrics_spec.rb b/spec/lib/gitlab/usage_data_non_sql_metrics_spec.rb
index 32d1288c59c..49682acbc66 100644
--- a/spec/lib/gitlab/usage_data_non_sql_metrics_spec.rb
+++ b/spec/lib/gitlab/usage_data_non_sql_metrics_spec.rb
@@ -5,6 +5,14 @@ require 'spec_helper'
RSpec.describe Gitlab::UsageDataNonSqlMetrics do
let(:default_count) { Gitlab::UsageDataNonSqlMetrics::SQL_METRIC_DEFAULT }
+ describe '#add_metric' do
+ let(:metric) { 'UuidMetric' }
+
+ it 'computes the metric value for given metric' do
+ expect(described_class.add_metric(metric)).to eq(Gitlab::CurrentSettings.uuid)
+ end
+ end
+
describe '.count' do
it 'returns default value for count' do
expect(described_class.count(User)).to eq(default_count)
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 438ae3efd11..64eff76a9f2 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -7,6 +7,14 @@ RSpec.describe Gitlab::UsageDataQueries do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
end
+ describe '#add_metric' do
+ let(:metric) { 'CountBoardsMetric' }
+
+ it 'builds the query for given metric' do
+ expect(described_class.add_metric(metric)).to eq('SELECT COUNT("boards"."id") FROM "boards"')
+ end
+ end
+
describe '.count' do
it 'returns the raw SQL' do
expect(described_class.count(User)).to start_with('SELECT COUNT("users"."id") FROM "users"')
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index d84974e562a..5d85ad5ad01 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -568,7 +568,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:projects_custom_issue_tracker_active]).to eq(1)
expect(count_data[:projects_mattermost_active]).to eq(1)
expect(count_data[:groups_mattermost_active]).to eq(1)
- expect(count_data[:templates_mattermost_active]).to eq(1)
expect(count_data[:instances_mattermost_active]).to eq(1)
expect(count_data[:projects_inheriting_mattermost_active]).to eq(1)
expect(count_data[:groups_inheriting_slack_active]).to eq(1)
@@ -623,6 +622,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:deployments]).to eq(4)
expect(count_data[:successful_deployments]).to eq(2)
expect(count_data[:failed_deployments]).to eq(2)
+ expect(count_data[:feature_flags]).to eq(1)
expect(count_data[:snippets]).to eq(6)
expect(count_data[:personal_snippets]).to eq(2)
expect(count_data[:project_snippets]).to eq(4)
@@ -892,9 +892,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:gitlab_pages][:enabled]).to eq(Gitlab.config.pages.enabled)
expect(subject[:gitlab_pages][:version]).to eq(Gitlab::Pages::VERSION)
expect(subject[:git][:version]).to eq(Gitlab::Git.version)
- expect(subject[:database][:adapter]).to eq(Gitlab::Database.adapter_name)
- expect(subject[:database][:version]).to eq(Gitlab::Database.version)
- expect(subject[:database][:pg_system_id]).to eq(Gitlab::Database.system_id)
+ expect(subject[:database][:adapter]).to eq(Gitlab::Database.main.adapter_name)
+ expect(subject[:database][:version]).to eq(Gitlab::Database.main.version)
+ expect(subject[:database][:pg_system_id]).to eq(Gitlab::Database.main.system_id)
expect(subject[:mail][:smtp_server]).to eq(ActionMailer::Base.smtp_settings[:address])
expect(subject[:gitaly][:version]).to be_present
expect(subject[:gitaly][:servers]).to be >= 1
@@ -1067,8 +1067,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.system_usage_data_settings }
- it 'gathers settings usage data', :aggregate_failures do
+ it 'gathers encrypted secrets usage data', :aggregate_failures do
expect(subject[:settings][:ldap_encrypted_secrets_enabled]).to eq(Gitlab::Auth::Ldap::Config.encrypted_secrets.active?)
+ expect(subject[:settings][:smtp_encrypted_secrets_enabled]).to eq(Gitlab::Email::SmtpConfig.encrypted_secrets.active?)
end
it 'populates operating system information' do
@@ -1080,7 +1081,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
it 'reports collected data categories' do
- expected_value = %w[Standard Subscription Operational Optional]
+ expected_value = %w[standard subscription operational optional]
allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance|
expect(instance).to receive(:execute).and_return(expected_value)
@@ -1360,6 +1361,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
"in_product_marketing_email_create_1_cta_clicked" => -1,
"in_product_marketing_email_create_2_sent" => -1,
"in_product_marketing_email_create_2_cta_clicked" => -1,
+ "in_product_marketing_email_team_short_0_sent" => -1,
+ "in_product_marketing_email_team_short_0_cta_clicked" => -1,
+ "in_product_marketing_email_trial_short_0_sent" => -1,
+ "in_product_marketing_email_trial_short_0_cta_clicked" => -1,
+ "in_product_marketing_email_admin_verify_0_sent" => -1,
+ "in_product_marketing_email_admin_verify_0_cta_clicked" => -1,
"in_product_marketing_email_verify_0_sent" => -1,
"in_product_marketing_email_verify_0_cta_clicked" => -1,
"in_product_marketing_email_verify_1_sent" => -1,
@@ -1399,6 +1406,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
"in_product_marketing_email_create_1_cta_clicked" => 0,
"in_product_marketing_email_create_2_sent" => 0,
"in_product_marketing_email_create_2_cta_clicked" => 0,
+ "in_product_marketing_email_team_short_0_sent" => 0,
+ "in_product_marketing_email_team_short_0_cta_clicked" => 0,
+ "in_product_marketing_email_trial_short_0_sent" => 0,
+ "in_product_marketing_email_trial_short_0_cta_clicked" => 0,
+ "in_product_marketing_email_admin_verify_0_sent" => 0,
+ "in_product_marketing_email_admin_verify_0_cta_clicked" => 0,
"in_product_marketing_email_verify_0_sent" => 1,
"in_product_marketing_email_verify_0_cta_clicked" => 0,
"in_product_marketing_email_verify_1_sent" => 0,
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 8f705d6a487..1d01d5c7e6a 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -5,6 +5,14 @@ require 'spec_helper'
RSpec.describe Gitlab::Utils::UsageData do
include Database::DatabaseHelpers
+ describe '#add_metric' do
+ let(:metric) { 'UuidMetric' }
+
+ it 'computes the metric value for given metric' do
+ expect(described_class.add_metric(metric)).to eq(Gitlab::CurrentSettings.uuid)
+ end
+ end
+
describe '#count' do
let(:relation) { double(:relation) }
@@ -41,10 +49,10 @@ RSpec.describe Gitlab::Utils::UsageData do
describe '#estimate_batch_distinct_count' do
let(:error_rate) { Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE } # HyperLogLog is a probabilistic algorithm, which provides estimated data, with given error margin
- let(:relation) { double(:relation) }
+ let(:relation) { double(:relation, connection: double(:connection)) }
before do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false) # rubocop: disable Database/MultipleDatabases
end
it 'delegates counting to counter class instance' do
@@ -95,6 +103,10 @@ RSpec.describe Gitlab::Utils::UsageData do
let(:build_needs_estimated_cardinality) { 5.217656147118495 }
let(:ci_builds_estimated_cardinality) { 2.0809220082170614 }
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false) # rubocop: disable Database/MultipleDatabases
+ end
+
context 'different counting parameters' do
before_all do
1.upto(3) { |i| create(:ci_build_need, name: i, build: build) }
diff --git a/spec/lib/gitlab/web_ide/config/entry/global_spec.rb b/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
index 8dbe64af1c7..9af21685c9e 100644
--- a/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
+++ b/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
@@ -82,7 +82,6 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Global do
it 'returns correct script' do
expect(global.terminal_value).to eq({
tag_list: [],
- yaml_variables: [],
job_variables: [],
options: {
before_script: ['ls'],
diff --git a/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb b/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
index d6d0fc4224d..f8c4a28ed45 100644
--- a/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
+++ b/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
@@ -141,7 +141,6 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Terminal do
expect(entry.value)
.to eq(
tag_list: ['webide'],
- yaml_variables: [{ key: 'KEY', value: 'value', public: true }],
job_variables: [{ key: 'KEY', value: 'value', public: true }],
options: {
image: { name: "ruby:3.0" },
diff --git a/spec/lib/gitlab/x509/tag_spec.rb b/spec/lib/gitlab/x509/tag_spec.rb
index b011ea515de..be120aaf16a 100644
--- a/spec/lib/gitlab/x509/tag_spec.rb
+++ b/spec/lib/gitlab/x509/tag_spec.rb
@@ -2,13 +2,13 @@
require 'spec_helper'
RSpec.describe Gitlab::X509::Tag do
- subject(:signature) { described_class.new(tag).signature }
+ subject(:signature) { described_class.new(project.repository, tag).signature }
describe '#signature' do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
let(:project) { create(:project, :repository) }
- describe 'signed tag' do
+ shared_examples 'signed tag' do
let(:tag) { project.repository.find_tag('v1.1.1') }
let(:certificate_attributes) do
{
@@ -33,10 +33,24 @@ RSpec.describe Gitlab::X509::Tag do
it { expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) }
end
- context 'unsigned tag' do
+ shared_examples 'unsigned tag' do
let(:tag) { project.repository.find_tag('v1.0.0') }
it { expect(signature).to be_nil }
end
+
+ context 'with :get_tag_signatures enabled' do
+ it_behaves_like 'signed tag'
+ it_behaves_like 'unsigned tag'
+ end
+
+ context 'with :get_tag_signatures disabled' do
+ before do
+ stub_feature_flags(get_tag_signatures: false)
+ end
+
+ it_behaves_like 'signed tag'
+ it_behaves_like 'unsigned tag'
+ end
end
end
diff --git a/spec/lib/peek/views/active_record_spec.rb b/spec/lib/peek/views/active_record_spec.rb
index e5aae2822ed..6d50922904e 100644
--- a/spec/lib/peek/views/active_record_spec.rb
+++ b/spec/lib/peek/views/active_record_spec.rb
@@ -52,6 +52,7 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
allow(connection_primary_1).to receive(:transaction_open?).and_return(false)
allow(connection_primary_2).to receive(:transaction_open?).and_return(true)
allow(connection_unknown).to receive(:transaction_open?).and_return(false)
+ allow(::Gitlab::Database).to receive(:db_config_name).and_return('the_db_config_name')
end
context 'when database load balancing is not enabled' do
@@ -77,32 +78,48 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
cached: '',
transaction: '',
duration: 1000.0,
- sql: 'SELECT * FROM users WHERE id = 10'
+ sql: 'SELECT * FROM users WHERE id = 10',
+ db_config_name: "Config name: the_db_config_name"
),
a_hash_including(
start: be_a(Time),
cached: 'Cached',
transaction: '',
duration: 2000.0,
- sql: 'SELECT * FROM users WHERE id = 10'
+ sql: 'SELECT * FROM users WHERE id = 10',
+ db_config_name: "Config name: the_db_config_name"
),
a_hash_including(
start: be_a(Time),
cached: '',
transaction: 'In a transaction',
duration: 3000.0,
- sql: 'UPDATE users SET admin = true WHERE id = 10'
+ sql: 'UPDATE users SET admin = true WHERE id = 10',
+ db_config_name: "Config name: the_db_config_name"
),
a_hash_including(
start: be_a(Time),
cached: '',
transaction: '',
duration: 4000.0,
- sql: 'SELECT VERSION()'
+ sql: 'SELECT VERSION()',
+ db_config_name: "Config name: the_db_config_name"
)
)
)
end
+
+ context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
+ before do
+ stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
+ end
+
+ it 'does not include db_config_name field' do
+ ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
+
+ expect(subject.results[:details][0][:db_config_name]).to be_nil
+ end
+ end
end
context 'when database load balancing is enabled' do
@@ -114,7 +131,7 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_unknown).and_return(nil)
end
- it 'includes db role data' do
+ it 'includes db role data and db_config_name name' do
Timecop.freeze(2021, 2, 23, 10, 0) do
ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 2.seconds, '2', event_2)
@@ -127,9 +144,9 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
summary: {
"Cached" => 1,
"In a transaction" => 1,
- "Primary" => 2,
- "Replica" => 1,
- "Unknown" => 1
+ "Role: Primary" => 2,
+ "Role: Replica" => 1,
+ "Role: Unknown" => 1
},
duration: '10000.00ms',
warnings: ["active-record duration: 10000.0 over 3000"],
@@ -140,7 +157,8 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
transaction: '',
duration: 1000.0,
sql: 'SELECT * FROM users WHERE id = 10',
- db_role: 'Primary'
+ db_role: 'Role: Primary',
+ db_config_name: "Config name: the_db_config_name"
),
a_hash_including(
start: be_a(Time),
@@ -148,7 +166,8 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
transaction: '',
duration: 2000.0,
sql: 'SELECT * FROM users WHERE id = 10',
- db_role: 'Replica'
+ db_role: 'Role: Replica',
+ db_config_name: "Config name: the_db_config_name"
),
a_hash_including(
start: be_a(Time),
@@ -156,7 +175,8 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
transaction: 'In a transaction',
duration: 3000.0,
sql: 'UPDATE users SET admin = true WHERE id = 10',
- db_role: 'Primary'
+ db_role: 'Role: Primary',
+ db_config_name: "Config name: the_db_config_name"
),
a_hash_including(
start: be_a(Time),
@@ -164,10 +184,23 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
transaction: '',
duration: 4000.0,
sql: 'SELECT VERSION()',
- db_role: 'Unknown'
+ db_role: 'Role: Unknown',
+ db_config_name: "Config name: the_db_config_name"
)
)
)
end
+
+ context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
+ before do
+ stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
+ end
+
+ it 'does not include db_config_name field' do
+ ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
+
+ expect(subject.results[:details][0][:db_config_name]).to be_nil
+ end
+ end
end
end
diff --git a/spec/lib/sidebars/groups/menus/ci_cd_menu_spec.rb b/spec/lib/sidebars/groups/menus/ci_cd_menu_spec.rb
new file mode 100644
index 00000000000..1ba89af1b02
--- /dev/null
+++ b/spec/lib/sidebars/groups/menus/ci_cd_menu_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Groups::Menus::CiCdMenu do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:root_group) do
+ build(:group, :private).tap do |g|
+ g.add_owner(owner)
+ end
+ end
+
+ let(:group) { root_group }
+ let(:user) { owner }
+ let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
+
+ describe 'Menu Items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ describe 'Runners' do
+ let(:item_id) { :runners }
+
+ specify { is_expected.not_to be_nil }
+
+ describe 'when feature flag :runner_list_group_view_vue_ui is disabled' do
+ before do
+ stub_feature_flags(runner_list_group_view_vue_ui: false)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb b/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb
new file mode 100644
index 00000000000..b68af6fb8ab
--- /dev/null
+++ b/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Groups::Menus::GroupInformationMenu do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:root_group) do
+ build(:group, :private).tap do |g|
+ g.add_owner(owner)
+ end
+ end
+
+ let(:group) { root_group }
+ let(:user) { owner }
+ let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
+
+ describe '#title' do
+ subject { described_class.new(context).title }
+
+ context 'when group is a root group' do
 +      specify { is_expected.to eq 'Group information' }
+ end
+
+ context 'when group is a child group' do
+ let(:group) { build(:group, parent: root_group) }
+
 +      specify { is_expected.to eq 'Subgroup information' }
+ end
+ end
+
+ describe 'Menu Items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ shared_examples 'menu access rights' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'Activity' do
+ let(:item_id) { :activity }
+
+ specify { is_expected.not_to be_nil }
+
+ it_behaves_like 'menu access rights'
+ end
+
+ describe 'Labels' do
+ let(:item_id) { :labels }
+
+ it_behaves_like 'menu access rights'
+ end
+
+ describe 'Members' do
+ let(:item_id) { :members }
+
+ it_behaves_like 'menu access rights'
+ end
+ end
+end
diff --git a/spec/lib/sidebars/groups/menus/issues_menu_spec.rb b/spec/lib/sidebars/groups/menus/issues_menu_spec.rb
new file mode 100644
index 00000000000..3d55eb3af40
--- /dev/null
+++ b/spec/lib/sidebars/groups/menus/issues_menu_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Groups::Menus::IssuesMenu do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:group) do
+ build(:group, :private).tap do |g|
+ g.add_owner(owner)
+ end
+ end
+
+ let(:user) { owner }
+ let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
+ let(:menu) { described_class.new(context) }
+
+ describe 'Menu Items' do
+ subject { menu.renderable_items.index { |e| e.item_id == item_id } }
+
+ shared_examples 'menu access rights' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'List' do
+ let(:item_id) { :issue_list }
+
+ specify { is_expected.not_to be_nil }
+
+ it_behaves_like 'menu access rights'
+ end
+
+ describe 'Boards' do
+ let(:item_id) { :boards }
+
+ it_behaves_like 'menu access rights'
+ end
+
+ describe 'Milestones' do
+ let(:item_id) { :milestones }
+
+ it_behaves_like 'menu access rights'
+ end
+ end
+
+ it_behaves_like 'pill_count formatted results' do
+ let(:count_service) { ::Groups::OpenIssuesCountService }
+ end
+end
diff --git a/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb b/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb
new file mode 100644
index 00000000000..76e58367c9d
--- /dev/null
+++ b/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Groups::Menus::KubernetesMenu do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:group) do
+ build(:group, :private).tap do |g|
+ g.add_owner(owner)
+ end
+ end
+
+ let(:user) { owner }
+ let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
+ let(:menu) { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when user can read clusters' do
+ it 'returns true' do
+ expect(menu.render?).to eq true
+ end
+ end
+
 +    context 'when user cannot read clusters' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(menu.render?).to eq false
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/groups/menus/merge_requests_menu_spec.rb b/spec/lib/sidebars/groups/menus/merge_requests_menu_spec.rb
new file mode 100644
index 00000000000..3aceff29d6d
--- /dev/null
+++ b/spec/lib/sidebars/groups/menus/merge_requests_menu_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Groups::Menus::MergeRequestsMenu do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:group) do
+ build(:group, :private).tap do |g|
+ g.add_owner(owner)
+ end
+ end
+
+ let(:user) { owner }
+ let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
+ let(:menu) { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when user can read merge requests' do
+ it 'returns true' do
+ expect(menu.render?).to eq true
+ end
+ end
+
+ context 'when user cannot read merge requests' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(menu.render?).to eq false
+ end
+ end
+ end
+
+ it_behaves_like 'pill_count formatted results' do
+ let(:count_service) { ::Groups::MergeRequestsCountService }
+ end
+end
diff --git a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
new file mode 100644
index 00000000000..5ebd67462f8
--- /dev/null
+++ b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:group) do
+ build(:group, :private).tap do |g|
+ g.add_owner(owner)
+ end
+ end
+
+ let(:user) { owner }
+ let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
+ let(:menu) { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when menu has menu items to show' do
+ it 'returns true' do
+ expect(menu.render?).to eq true
+ end
+ end
+
+ context 'when menu does not have any menu item to show' do
+ it 'returns false' do
+ stub_container_registry_config(enabled: false)
+ stub_config(packages: { enabled: false })
+ stub_config(dependency_proxy: { enabled: false })
+
+ expect(menu.render?).to eq false
+ end
+ end
+ end
+
+ describe '#link' do
+ let(:registry_enabled) { true }
+ let(:packages_enabled) { true }
+
+ before do
+ stub_container_registry_config(enabled: registry_enabled)
+ stub_config(packages: { enabled: packages_enabled })
+ stub_config(dependency_proxy: { enabled: true })
+ end
+
+ subject { menu.link }
+
+ context 'when Packages Registry is visible' do
+ it 'menu link points to Packages Registry page' do
+ expect(subject).to eq find_menu(menu, :packages_registry).link
+ end
+ end
+
+ context 'when Packages Registry is not visible' do
+ let(:packages_enabled) { false }
+
+ it 'menu link points to Container Registry page' do
+ expect(subject).to eq find_menu(menu, :container_registry).link
+ end
+
+ context 'when Container Registry is not visible' do
+ let(:registry_enabled) { false }
+
+ it 'menu link points to Dependency Proxy page' do
+ expect(subject).to eq find_menu(menu, :dependency_proxy).link
+ end
+ end
+ end
+ end
+
+ describe 'Menu items' do
+ subject { find_menu(menu, item_id) }
+
+ describe 'Packages Registry' do
+ let(:item_id) { :packages_registry }
+
+ context 'when user can read packages' do
+ before do
+ stub_config(packages: { enabled: packages_enabled })
+ end
+
+ context 'when config package setting is disabled' do
+ let(:packages_enabled) { false }
+
+ it 'the menu item is not added to list of menu items' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when config package setting is enabled' do
+ let(:packages_enabled) { true }
+
+ it 'the menu item is added to list of menu items' do
+ is_expected.not_to be_nil
+ end
+ end
+ end
+ end
+
+ describe 'Container Registry' do
+ let(:item_id) { :container_registry }
+
+ context 'when user can read container images' do
+ before do
+ stub_container_registry_config(enabled: container_enabled)
+ end
+
+ context 'when config registry setting is disabled' do
+ let(:container_enabled) { false }
+
+ it 'the menu item is not added to list of menu items' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when config registry setting is enabled' do
+ let(:container_enabled) { true }
+
+ it 'the menu item is added to list of menu items' do
+ is_expected.not_to be_nil
+ end
+
+ context 'when user cannot read container images' do
+ let(:user) { nil }
+
+ it 'the menu item is not added to list of menu items' do
+ is_expected.to be_nil
+ end
+ end
+ end
+ end
+ end
+
+ describe 'Dependency Proxy' do
+ let(:item_id) { :dependency_proxy }
+
+ before do
+ stub_config(dependency_proxy: { enabled: dependency_enabled })
+ end
+
+ context 'when config dependency_proxy is enabled' do
+ let(:dependency_enabled) { true }
+
+ it 'the menu item is added to list of menu items' do
+ is_expected.not_to be_nil
+ end
+ end
+
+ context 'when config dependency_proxy is not enabled' do
+ let(:dependency_enabled) { false }
+
+ it 'the menu item is not added to list of menu items' do
+ is_expected.to be_nil
+ end
+ end
+ end
+ end
+
+ private
+
+ def find_menu(menu, item)
+ menu.renderable_items.find { |i| i.item_id == item }
+ end
+end
diff --git a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
new file mode 100644
index 00000000000..314c4cdc602
--- /dev/null
+++ b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Groups::Menus::SettingsMenu do
+ let_it_be(:owner) { create(:user) }
+
+ let_it_be_with_refind(:group) do
+ build(:group, :private).tap do |g|
+ g.add_owner(owner)
+ end
+ end
+
+ let(:user) { owner }
+ let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
+ let(:menu) { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when user cannot admin group' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(menu.render?).to be false
+ end
+ end
+ end
+
+ describe 'Menu items' do
+ subject { menu.renderable_items.find { |e| e.item_id == item_id } }
+
+ shared_examples 'access rights checks' do
+ specify { is_expected.not_to be_nil }
+
+ context 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'General menu' do
+ let(:item_id) { :general }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Integrations menu' do
+ let(:item_id) { :integrations }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Projects menu' do
+ let(:item_id) { :group_projects }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Repository menu' do
+ let(:item_id) { :repository }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'CI/CD menu' do
+ let(:item_id) { :ci_cd }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Applications menu' do
+ let(:item_id) { :applications }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Packages & Registries' do
+ let(:item_id) { :packages_and_registries }
+
+ before do
+ allow(group).to receive(:packages_feature_enabled?).and_return(packages_enabled)
+ end
+
+ describe 'when packages feature is disabled' do
+ let(:packages_enabled) { false }
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when packages feature is enabled' do
+ let(:packages_enabled) { true }
+
+ it_behaves_like 'access rights checks'
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb
index 95009aa063f..1db80351e45 100644
--- a/spec/lib/sidebars/menu_spec.rb
+++ b/spec/lib/sidebars/menu_spec.rb
@@ -26,6 +26,14 @@ RSpec.describe Sidebars::Menu do
it 'returns false' do
expect(menu.render?).to be false
end
+
+ context 'when menu has a partial' do
+ it 'returns true' do
+ allow(menu).to receive(:menu_partial).and_return('foo')
+
+ expect(menu.render?).to be true
+ end
+ end
end
context 'when the menu has items' do
diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
index cc4760e69e5..d6807451a25 100644
--- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
@@ -51,8 +51,8 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do
context 'when Container Registry is not visible' do
let(:registry_enabled) { false }
- it 'menu link points to Infrastructure Registry page' do
- expect(subject.link).to eq described_class.new(context).renderable_items.find { |i| i.item_id == :infrastructure_registry }.link
+ it 'does not display menu link' do
+ expect(subject.render?).to eq false
end
end
end
@@ -124,18 +124,22 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do
describe 'Infrastructure Registry' do
let(:item_id) { :infrastructure_registry }
- context 'when feature flag :infrastructure_registry_page is enabled' do
- it 'the menu item is added to list of menu items' do
- stub_feature_flags(infrastructure_registry_page: true)
+ it 'the menu item is added to list of menu items' do
+ is_expected.not_to be_nil
+ end
+
+ context 'when config package setting is disabled' do
+ it 'does not add the menu item to the list' do
+ stub_config(packages: { enabled: false })
- is_expected.not_to be_nil
+ is_expected.to be_nil
end
end
- context 'when feature flag :infrastructure_registry_page is disabled' do
- it 'the menu item is not added to list of menu items' do
- stub_feature_flags(infrastructure_registry_page: false)
+ context 'when user cannot read packages' do
+ let(:user) { nil }
+ it 'does not add the menu item to the list' do
is_expected.to be_nil
end
end
diff --git a/spec/mailers/emails/pipelines_spec.rb b/spec/mailers/emails/pipelines_spec.rb
index a29835f3439..b9bc53625ac 100644
--- a/spec/mailers/emails/pipelines_spec.rb
+++ b/spec/mailers/emails/pipelines_spec.rb
@@ -9,12 +9,14 @@ RSpec.describe Emails::Pipelines do
let_it_be(:project) { create(:project, :repository) }
shared_examples_for 'correct pipeline information' do
- it 'has a correct information' do
- expect(subject)
- .to have_subject "#{status} pipeline for #{pipeline.source_ref} | " \
- "#{project.name} | " \
- "#{pipeline.short_sha}".to_s
+ let(:expected_email_subject) do
+ "#{project.name} | " \
+ "#{status} pipeline for #{pipeline.source_ref} | " \
+ "#{pipeline.short_sha}"
+ end
+ it 'has a correct information' do
+ expect(subject).to have_subject expected_email_subject
expect(subject).to have_body_text pipeline.source_ref
expect(subject).to have_body_text status_text
end
@@ -28,11 +30,7 @@ RSpec.describe Emails::Pipelines do
end
it 'has correct information that there is no merge request link' do
- expect(subject)
- .to have_subject "#{status} pipeline for #{pipeline.source_ref} | " \
- "#{project.name} | " \
- "#{pipeline.short_sha}".to_s
-
+ expect(subject).to have_subject expected_email_subject
expect(subject).to have_body_text pipeline.source_ref
expect(subject).to have_body_text status_text
end
@@ -48,11 +46,7 @@ RSpec.describe Emails::Pipelines do
end
it 'has correct information that there is a merge request link' do
- expect(subject)
- .to have_subject "#{status} pipeline for #{pipeline.source_ref} | " \
- "#{project.name} | " \
- "#{pipeline.short_sha}".to_s
-
+ expect(subject).to have_subject expected_email_subject
expect(subject).to have_body_text merge_request.to_reference
expect(subject).to have_body_text pipeline.source_ref
expect(subject).not_to have_body_text pipeline.ref
@@ -70,11 +64,7 @@ RSpec.describe Emails::Pipelines do
end
it 'has correct information that there is a merge request link' do
- expect(subject)
- .to have_subject "#{status} pipeline for #{pipeline.source_ref} | " \
- "#{project.name} | " \
- "#{pipeline.short_sha}".to_s
-
+ expect(subject).to have_subject expected_email_subject
expect(subject).to have_body_text merge_request.to_reference
expect(subject).to have_body_text pipeline.source_ref
end
@@ -91,6 +81,17 @@ RSpec.describe Emails::Pipelines do
it_behaves_like 'correct pipeline information' do
let(:status) { 'Successful' }
let(:status_text) { "Pipeline ##{pipeline.id} has passed!" }
+ let(:email_subject_suffix) { 'A Nice Suffix' }
+ let(:expected_email_subject) do
+ "#{project.name} | " \
+ "#{status} pipeline for #{pipeline.source_ref} | " \
+ "#{pipeline.short_sha} | " \
+ "#{email_subject_suffix}"
+ end
+
+ before do
+ stub_config_setting(email_subject_suffix: email_subject_suffix)
+ end
end
end
diff --git a/spec/mailers/emails/projects_spec.rb b/spec/mailers/emails/projects_spec.rb
index a5b89d16bc2..b9c71e35bc6 100644
--- a/spec/mailers/emails/projects_spec.rb
+++ b/spec/mailers/emails/projects_spec.rb
@@ -36,6 +36,27 @@ RSpec.describe Emails::Projects do
Notify.prometheus_alert_fired_email(project, user, alert)
end
+ it_behaves_like 'an email with X-GitLab headers containing project details'
+
+ it 'has expected X-GitLab alert headers', :aggregate_failures do
+ is_expected.to have_header('X-GitLab-Alert-ID', /#{alert.id}/)
+ is_expected.to have_header('X-GitLab-Alert-IID', /#{alert.iid}/)
+ is_expected.to have_header('X-GitLab-NotificationReason', "alert_#{alert.state}")
+
+ is_expected.not_to have_header('X-GitLab-Incident-ID', /.+/)
+ is_expected.not_to have_header('X-GitLab-Incident-IID', /.+/)
+ end
+
+ context 'with incident' do
+ let(:alert) { create(:alert_management_alert, :with_incident, :from_payload, payload: payload, project: project) }
+ let(:incident) { alert.issue }
+
+ it 'has expected X-GitLab incident headers', :aggregate_failures do
+ is_expected.to have_header('X-GitLab-Incident-ID', /#{incident.id}/)
+ is_expected.to have_header('X-GitLab-Incident-IID', /#{incident.iid}/)
+ end
+ end
+
context 'with empty payload' do
let(:payload) { {} }
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index ae956adf563..8272b5d64c1 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -781,7 +781,9 @@ RSpec.describe Notify do
let(:project_member) { invite_to_project(project, inviter: inviter) }
let(:inviter) { maintainer }
- subject { described_class.member_invited_email('project', project_member.id, project_member.invite_token) }
+ subject(:invite_email) do
+ described_class.member_invited_email('project', project_member.id, project_member.invite_token)
+ end
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
@@ -796,23 +798,13 @@ RSpec.describe Notify do
is_expected.to have_body_text project.full_name
is_expected.to have_body_text project_member.human_access.downcase
is_expected.to have_body_text project_member.invite_token
- is_expected.to have_link('Join now', href: invite_url(project_member.invite_token, invite_type: Members::InviteEmailExperiment::INVITE_TYPE))
- end
-
- it 'contains invite link for the group activity' do
- stub_experiments('members/invite_email': :activity)
-
+ is_expected.to have_link('Join now',
+ href: invite_url(project_member.invite_token,
+ invite_type: Emails::Members::INITIAL_INVITE,
+ experiment_name: 'invite_email_preview_text'))
is_expected.to have_content("#{inviter.name} invited you to join the")
is_expected.to have_content('Project details')
is_expected.to have_content("What's it about?")
- is_expected.not_to have_content('You are invited!')
- is_expected.not_to have_body_text 'What is a GitLab'
- end
-
- it 'has invite link for the control group' do
- stub_experiments('members/invite_email': :control)
-
- is_expected.to have_content('You are invited!')
end
end
@@ -824,6 +816,25 @@ RSpec.describe Notify do
is_expected.to have_body_text project.full_name
is_expected.to have_body_text project_member.human_access.downcase
is_expected.to have_body_text project_member.invite_token
+ is_expected.to have_link('Join now',
+ href: invite_url(project_member.invite_token,
+ invite_type: Emails::Members::INITIAL_INVITE,
+ experiment_name: 'invite_email_preview_text'))
+ is_expected.to have_content('Project details')
+ is_expected.to have_content("What's it about?")
+ end
+ end
+
+ context 'when invite email sent is tracked', :snowplow do
+ it 'tracks the sent invite' do
+ invite_email.deliver_now
+
+ expect_snowplow_event(
+ category: 'Notify',
+ action: 'invite_email_sent',
+ label: 'invite_email',
+ property: project_member.id.to_s
+ )
end
end
diff --git a/spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb b/spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb
new file mode 100644
index 00000000000..a0aae00776d
--- /dev/null
+++ b/spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('update_issuable_slas_where_issue_closed')
+
+RSpec.describe UpdateIssuableSlasWhereIssueClosed, :migration do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:issuable_slas) { table(:issuable_slas) }
+ let(:issue_params) { { title: 'title', project_id: project.id } }
+ let(:issue_closed_state) { 2 }
+
+ let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let!(:issue_open) { issues.create!(issue_params) }
+ let!(:issue_closed) { issues.create!(issue_params.merge(state_id: issue_closed_state)) }
+
+ let!(:issuable_sla_open_issue) { issuable_slas.create!(issue_id: issue_open.id, due_at: Time.now) }
+ let!(:issuable_sla_closed_issue) { issuable_slas.create!(issue_id: issue_closed.id, due_at: Time.now) }
+
+ it 'sets the issuable_closed attribute to false' do
+ expect(issuable_sla_open_issue.issuable_closed).to eq(false)
+ expect(issuable_sla_closed_issue.issuable_closed).to eq(false)
+
+ migrate!
+
+ expect(issuable_sla_open_issue.reload.issuable_closed).to eq(false)
+ expect(issuable_sla_closed_issue.reload.issuable_closed).to eq(true)
+ end
+end
diff --git a/spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb b/spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb
new file mode 100644
index 00000000000..130ad45ffc1
--- /dev/null
+++ b/spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!('operations_feature_flags_correct_flexible_rollout_values')
+
+RSpec.describe OperationsFeatureFlagsCorrectFlexibleRolloutValues, :migration do
+ let_it_be(:strategies) { table(:operations_strategies) }
+
+ let(:namespace) { table(:namespaces).create!(name: 'feature_flag', path: 'feature_flag') }
+ let(:project) { table(:projects).create!(namespace_id: namespace.id) }
+ let(:feature_flag) { table(:operations_feature_flags).create!(project_id: project.id, active: true, name: 'foo', iid: 1) }
+
+ describe "#up" do
+ described_class::STICKINESS.each do |old, new|
+ it "corrects parameters for flexible rollout stickiness #{old}" do
+ reversible_migration do |migration|
+ parameters = { groupId: "default", rollout: "100", stickiness: old }
+ strategy = create_strategy(parameters)
+
+ migration.before -> {
+ expect(strategy.reload.parameters).to eq({ "groupId" => "default", "rollout" => "100", "stickiness" => old })
+ }
+
+ migration.after -> {
+ expect(strategy.reload.parameters).to eq({ "groupId" => "default", "rollout" => "100", "stickiness" => new })
+ }
+ end
+ end
+ end
+
+ it 'ignores other strategies' do
+ reversible_migration do |migration|
+ parameters = { "groupId" => "default", "rollout" => "100", "stickiness" => "USERID" }
+ strategy = create_strategy(parameters, name: 'default')
+
+ migration.before -> {
+ expect(strategy.reload.parameters).to eq(parameters)
+ }
+
+ migration.after -> {
+ expect(strategy.reload.parameters).to eq(parameters)
+ }
+ end
+ end
+
+ it 'ignores other stickiness' do
+ reversible_migration do |migration|
+ parameters = { "groupId" => "default", "rollout" => "100", "stickiness" => "FOO" }
+ strategy = create_strategy(parameters)
+
+ migration.before -> {
+ expect(strategy.reload.parameters).to eq(parameters)
+ }
+
+ migration.after -> {
+ expect(strategy.reload.parameters).to eq(parameters)
+ }
+ end
+ end
+ end
+
+ def create_strategy(params, name: 'flexibleRollout')
+ strategies.create!(name: name, parameters: params, feature_flag_id: feature_flag.id)
+ end
+end
diff --git a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
new file mode 100644
index 00000000000..535472f5931
--- /dev/null
+++ b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('create_base_work_item_types')
+
+RSpec.describe CreateBaseWorkItemTypes, :migration do
+ let!(:work_item_types) { table(:work_item_types) }
+
+ it 'creates default data' do
+ reversible_migration do |migration|
+ migration.before -> {
+ # Depending on whether the migration has been run before,
+ # the size could be 4, or 0, so we don't set any expectations
+ }
+
+ migration.after -> {
+ expect(work_item_types.count).to eq 4
+ expect(work_item_types.all.pluck(:base_type)).to match_array WorkItem::Type.base_types.values
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb b/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb
new file mode 100644
index 00000000000..819120d43ef
--- /dev/null
+++ b/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!('update_trial_plans_ci_daily_pipeline_schedule_triggers')
+
+RSpec.describe UpdateTrialPlansCiDailyPipelineScheduleTriggers, :migration do
+ let!(:plans) { table(:plans) }
+ let!(:plan_limits) { table(:plan_limits) }
+ let!(:premium_trial_plan) { plans.create!(name: 'premium_trial', title: 'Premium Trial') }
+ let!(:ultimate_trial_plan) { plans.create!(name: 'ultimate_trial', title: 'Ultimate Trial') }
+
+ describe '#up' do
+ let!(:premium_trial_plan_limits) { plan_limits.create!(plan_id: premium_trial_plan.id, ci_daily_pipeline_schedule_triggers: 0) }
+ let!(:ultimate_trial_plan_limits) { plan_limits.create!(plan_id: ultimate_trial_plan.id, ci_daily_pipeline_schedule_triggers: 0) }
+
+ context 'when the environment is dev or com' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ end
+
+ it 'sets the trial plan limits for ci_daily_pipeline_schedule_triggers' do
+ disable_migrations_output { migrate! }
+
+ expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
+ expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
+ end
+
+ it 'does not change the plan limits if the ultimate trial plan is missing' do
+ ultimate_trial_plan.destroy!
+
+ expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count }
+ expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
+ end
+
+ it 'does not change the plan limits if the ultimate trial plan limits is missing' do
+ ultimate_trial_plan_limits.destroy!
+
+ expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count }
+ expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
+ end
+
+ it 'does not change the plan limits if the premium trial plan is missing' do
+ premium_trial_plan.destroy!
+
+ expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count }
+ expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
+ end
+
+ it 'does not change the plan limits if the premium trial plan limits is missing' do
+ premium_trial_plan_limits.destroy!
+
+ expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count }
+ expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
+ end
+ end
+
+ context 'when the environment is anything other than dev or com' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ end
+
+ it 'does not update the plan limits' do
+ disable_migrations_output { migrate! }
+
+ expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
+ expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
+ end
+ end
+ end
+
+ describe '#down' do
+ let!(:premium_trial_plan_limits) { plan_limits.create!(plan_id: premium_trial_plan.id, ci_daily_pipeline_schedule_triggers: 288) }
+ let!(:ultimate_trial_plan_limits) { plan_limits.create!(plan_id: ultimate_trial_plan.id, ci_daily_pipeline_schedule_triggers: 288) }
+
+ context 'when the environment is dev or com' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ end
+
+ it 'sets the trial plan limits ci_daily_pipeline_schedule_triggers to zero' do
+ migrate_down!
+
+ expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
+ expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
+ end
+
+ it 'does not change the plan limits if the ultimate trial plan is missing' do
+ ultimate_trial_plan.destroy!
+
+ expect { migrate_down! }.not_to change { plan_limits.count }
+ expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
+ end
+
+ it 'does not change the plan limits if the ultimate trial plan limits is missing' do
+ ultimate_trial_plan_limits.destroy!
+
+ expect { migrate_down! }.not_to change { plan_limits.count }
+ expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
+ end
+
+ it 'does not change the plan limits if the premium trial plan is missing' do
+ premium_trial_plan.destroy!
+
+ expect { migrate_down! }.not_to change { plan_limits.count }
+ expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
+ end
+
+ it 'does not change the plan limits if the premium trial plan limits is missing' do
+ premium_trial_plan_limits.destroy!
+
+ expect { migrate_down! }.not_to change { plan_limits.count }
+ expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
+ end
+ end
+
+ context 'when the environment is anything other than dev or com' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ end
+
+ it 'does not change the ultimate trial plan limits' do
+ migrate_down!
+
+ expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
+ expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
+ end
+ end
+ end
+
+ def migrate_down!
+ disable_migrations_output do
+ migrate!
+ described_class.new.down
+ end
+ end
+end
diff --git a/spec/migrations/add_triggers_to_integrations_type_new_spec.rb b/spec/migrations/add_triggers_to_integrations_type_new_spec.rb
new file mode 100644
index 00000000000..07845715a52
--- /dev/null
+++ b/spec/migrations/add_triggers_to_integrations_type_new_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe AddTriggersToIntegrationsTypeNew do
+ let(:migration) { described_class.new }
+ let(:integrations) { table(:integrations) }
+
+ describe '#up' do
+ before do
+ migrate!
+ end
+
+ describe 'INSERT trigger' do
+ it 'sets `type_new` to the transformed `type` class name' do
+ Gitlab::Integrations::StiType.namespaced_integrations.each do |type|
+ integration = integrations.create!(type: "#{type}Service")
+
+ expect(integration.reload).to have_attributes(
+ type: "#{type}Service",
+ type_new: "Integrations::#{type}"
+ )
+ end
+ end
+
+ it 'ignores types that are not namespaced' do
+ # We don't actually have any integrations without namespaces,
+ # but we can abuse one of the integration base classes.
+ integration = integrations.create!(type: 'BaseIssueTracker')
+
+ expect(integration.reload).to have_attributes(
+ type: 'BaseIssueTracker',
+ type_new: nil
+ )
+ end
+
+ it 'ignores types that are unknown' do
+ integration = integrations.create!(type: 'FooBar')
+
+ expect(integration.reload).to have_attributes(
+ type: 'FooBar',
+ type_new: nil
+ )
+ end
+ end
+ end
+
+ describe '#down' do
+ before do
+ migration.up
+ migration.down
+ end
+
+ it 'drops the INSERT trigger' do
+ integration = integrations.create!(type: 'JiraService')
+
+ expect(integration.reload).to have_attributes(
+ type: 'JiraService',
+ type_new: nil
+ )
+ end
+ end
+end
diff --git a/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb b/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb
new file mode 100644
index 00000000000..ce0ab4223e8
--- /dev/null
+++ b/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20210629031900_associate_existing_dast_builds_with_variables.rb')
+
+RSpec.describe AssociateExistingDastBuildsWithVariables do
+ subject(:migration) { described_class.new }
+
+ let_it_be(:namespaces_table) { table(:namespaces) }
+ let_it_be(:projects_table) { table(:projects) }
+ let_it_be(:ci_pipelines_table) { table(:ci_pipelines) }
+ let_it_be(:ci_builds_table) { table(:ci_builds) }
+ let_it_be(:dast_sites_table) { table(:dast_sites) }
+ let_it_be(:dast_site_profiles_table) { table(:dast_site_profiles) }
+ let_it_be(:dast_scanner_profiles_table) { table(:dast_scanner_profiles) }
+ let_it_be(:dast_site_profiles_builds_table) { table(:dast_site_profiles_builds) }
+ let_it_be(:dast_profiles_table) { table(:dast_profiles) }
+ let_it_be(:dast_profiles_pipelines_table) { table(:dast_profiles_pipelines) }
+
+ let!(:group) { namespaces_table.create!(type: 'Group', name: 'group', path: 'group') }
+ let!(:project) { projects_table.create!(name: 'project', path: 'project', namespace_id: group.id) }
+
+ let!(:pipeline_0) { ci_pipelines_table.create!(project_id: project.id, source: 13) }
+ let!(:pipeline_1) { ci_pipelines_table.create!(project_id: project.id, source: 13) }
+ let!(:build_0) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_0.id, name: :dast, stage: :dast) }
+ let!(:build_1) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_0.id, name: :dast, stage: :dast) }
+ let!(:build_2) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_1.id, name: :dast, stage: :dast) }
+ let!(:build_3) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_1.id, name: :dast) }
+ let!(:build_4) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_1.id, stage: :dast) }
+
+ let!(:dast_site) { dast_sites_table.create!(project_id: project.id, url: generate(:url)) }
+ let!(:dast_site_profile) { dast_site_profiles_table.create!(project_id: project.id, dast_site_id: dast_site.id, name: SecureRandom.hex) }
+ let!(:dast_scanner_profile) { dast_scanner_profiles_table.create!(project_id: project.id, name: SecureRandom.hex) }
+
+ let!(:dast_profile) do
+ dast_profiles_table.create!(
+ project_id: project.id,
+ dast_site_profile_id: dast_site_profile.id,
+ dast_scanner_profile_id: dast_scanner_profile.id,
+ name: SecureRandom.hex,
+ description: SecureRandom.hex
+ )
+ end
+
+ let!(:dast_profiles_pipeline_0) { dast_profiles_pipelines_table.create!(dast_profile_id: dast_profile.id, ci_pipeline_id: pipeline_0.id) }
+ let!(:dast_profiles_pipeline_1) { dast_profiles_pipelines_table.create!(dast_profile_id: dast_profile.id, ci_pipeline_id: pipeline_1.id) }
+
+ context 'when there are ci_pipelines with associated dast_profiles' do
+ describe 'migration up' do
+ it 'adds association of dast_site_profiles to ci_builds', :aggregate_failures do
+ expect(dast_site_profiles_builds_table.all).to be_empty
+
+ migration.up
+
+ expected_results = [
+ [dast_site_profile.id, build_0.id],
+ [dast_site_profile.id, build_1.id],
+ [dast_site_profile.id, build_2.id]
+ ]
+
+ expect(dast_site_profiles_builds_table.all.map { |assoc| [assoc.dast_site_profile_id, assoc.ci_build_id] }).to contain_exactly(*expected_results)
+ end
+ end
+ end
+
+ describe 'migration down' do
+ it 'deletes all records in the dast_site_profiles_builds table', :aggregate_failures do
+ expect(dast_site_profiles_builds_table.all).to be_empty
+
+ migration.up
+ migration.down
+
+ expect(dast_site_profiles_builds_table.all).to be_empty
+ end
+ end
+end
diff --git a/spec/migrations/backfill_integrations_type_new_spec.rb b/spec/migrations/backfill_integrations_type_new_spec.rb
new file mode 100644
index 00000000000..5b8fbf6f555
--- /dev/null
+++ b/spec/migrations/backfill_integrations_type_new_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillIntegrationsTypeNew do
+ let_it_be(:migration) { described_class::MIGRATION }
+ let_it_be(:integrations) { table(:integrations) }
+
+ before do
+ integrations.create!(id: 1)
+ integrations.create!(id: 2)
+ integrations.create!(id: 3)
+ integrations.create!(id: 4)
+ integrations.create!(id: 5)
+ end
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of integrations' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :integrations,
+ column_name: :id,
+ interval: described_class::INTERVAL
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/backfill_issues_upvotes_count_spec.rb b/spec/migrations/backfill_issues_upvotes_count_spec.rb
index f2bea0edea0..94cfa29ae89 100644
--- a/spec/migrations/backfill_issues_upvotes_count_spec.rb
+++ b/spec/migrations/backfill_issues_upvotes_count_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe BackfillIssuesUpvotesCount do
let!(:award_emoji3) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue3.id) }
let!(:award_emoji4) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue4.id) }
- it 'correctly schedules background migrations' do
+ it 'correctly schedules background migrations', :aggregate_failures do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
Sidekiq::Testing.fake! do
diff --git a/spec/migrations/confirm_security_bot_spec.rb b/spec/migrations/confirm_security_bot_spec.rb
new file mode 100644
index 00000000000..19ca81f92f3
--- /dev/null
+++ b/spec/migrations/confirm_security_bot_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ConfirmSecurityBot, :migration do
+ let(:users) { table(:users) }
+
+ let(:user_type) { 8 }
+
+ context 'when bot is not created' do
+ it 'skips migration' do
+ migrate!
+
+ bot = users.find_by(user_type: user_type)
+
+ expect(bot).to be_nil
+ end
+ end
+
+ context 'when bot is confirmed' do
+ let(:bot) { table(:users).create!(user_type: user_type, confirmed_at: Time.current, projects_limit: 1) }
+
+ it 'skips migration' do
+ expect { migrate! }.not_to change { bot.reload.confirmed_at }
+ end
+ end
+
+ context 'when bot is not confirmed' do
+ let(:bot) { table(:users).create!(user_type: user_type, projects_limit: 1) }
+
+ it 'update confirmed_at' do
+ freeze_time do
+ expect { migrate! }.to change { bot.reload.confirmed_at }.from(nil).to(Time.current)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb b/spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb
new file mode 100644
index 00000000000..b7a91abf5d7
--- /dev/null
+++ b/spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe GenerateCustomersDotJwtSigningKey do
+ let(:application_settings) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'application_settings'
+
+ attr_encrypted :customers_dot_jwt_signing_key, {
+ mode: :per_attribute_iv,
+ key: Gitlab::Utils.ensure_utf8_size(Rails.application.secrets.db_key_base, bytes: 32.bytes),
+ algorithm: 'aes-256-gcm',
+ encode: true
+ }
+ end
+ end
+
+ it 'generates JWT signing key' do
+ application_settings.create!
+
+ reversible_migration do |migration|
+ migration.before -> {
+ settings = application_settings.first
+
+ expect(settings.customers_dot_jwt_signing_key).to be_nil
+ expect(settings.encrypted_customers_dot_jwt_signing_key).to be_nil
+ expect(settings.encrypted_customers_dot_jwt_signing_key_iv).to be_nil
+ }
+
+ migration.after -> {
+ settings = application_settings.first
+
+ expect(settings.encrypted_customers_dot_jwt_signing_key).to be_present
+ expect(settings.encrypted_customers_dot_jwt_signing_key_iv).to be_present
+ expect { OpenSSL::PKey::RSA.new(settings.customers_dot_jwt_signing_key) }.not_to raise_error
+ }
+ end
+ end
+end
diff --git a/spec/migrations/orphaned_invite_tokens_cleanup_spec.rb b/spec/migrations/orphaned_invite_tokens_cleanup_spec.rb
new file mode 100644
index 00000000000..889c04700c7
--- /dev/null
+++ b/spec/migrations/orphaned_invite_tokens_cleanup_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration! 'orphaned_invite_tokens_cleanup'
+
+RSpec.describe OrphanedInviteTokensCleanup, :migration do
+ def create_member(**extra_attributes)
+ defaults = {
+ access_level: 10,
+ source_id: 1,
+ source_type: "Project",
+ notification_level: 0,
+ type: 'ProjectMember'
+ }
+
+ table(:members).create!(defaults.merge(extra_attributes))
+ end
+
+ describe '#up', :aggregate_failures do
+ it 'removes invite tokens for accepted records with invite_accepted_at < created_at' do
+ record1 = create_member(invite_token: 'foo', invite_accepted_at: 1.day.ago, created_at: 1.hour.ago)
+ record2 = create_member(invite_token: 'foo2', invite_accepted_at: nil, created_at: 1.hour.ago)
+ record3 = create_member(invite_token: 'foo3', invite_accepted_at: 1.day.ago, created_at: 1.year.ago)
+
+ migrate!
+
+ expect(table(:members).find(record1.id).invite_token).to eq nil
+ expect(table(:members).find(record2.id).invite_token).to eq 'foo2'
+ expect(table(:members).find(record3.id).invite_token).to eq 'foo3'
+ end
+ end
+end
diff --git a/spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb b/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb
index 354a0896ac9..8a9b993b869 100644
--- a/spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb
+++ b/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe ReScheduleLatestPipelineIdPopulation do
+RSpec.describe ReScheduleLatestPipelineIdPopulationWithAllSecurityRelatedArtifactTypes do
let(:namespaces) { table(:namespaces) }
let(:pipelines) { table(:ci_pipelines) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/schedule_delete_orphaned_deployments_spec.rb b/spec/migrations/reschedule_delete_orphaned_deployments_spec.rb
index 618958a3d90..eb91602388c 100644
--- a/spec/migrations/schedule_delete_orphaned_deployments_spec.rb
+++ b/spec/migrations/reschedule_delete_orphaned_deployments_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require_migration!
-RSpec.describe ScheduleDeleteOrphanedDeployments, :sidekiq, schema: 20210617161348 do
+RSpec.describe RescheduleDeleteOrphanedDeployments, :sidekiq, schema: 20210617161348 do
let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
let!(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) }
@@ -22,6 +22,31 @@ RSpec.describe ScheduleDeleteOrphanedDeployments, :sidekiq, schema: 202106171613
stub_const("#{described_class}::BATCH_SIZE", 1)
end
+ it 'steal existing background migration jobs' do
+ expect(Gitlab::BackgroundMigration).to receive(:steal).with('DeleteOrphanedDeployments')
+
+ migrate!
+ end
+
+ it 'cleans up background migration jobs tracking records' do
+ old_successful_job = background_migration_jobs.create!(
+ class_name: 'DeleteOrphanedDeployments',
+ status: Gitlab::Database::BackgroundMigrationJob.statuses[:succeeded],
+ arguments: [table(:deployments).minimum(:id), table(:deployments).minimum(:id)]
+ )
+
+ old_pending_job = background_migration_jobs.create!(
+ class_name: 'DeleteOrphanedDeployments',
+ status: Gitlab::Database::BackgroundMigrationJob.statuses[:pending],
+ arguments: [table(:deployments).maximum(:id), table(:deployments).maximum(:id)]
+ )
+
+ migrate!
+
+ expect { old_successful_job.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { old_pending_job.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
it 'schedules DeleteOrphanedDeployments background jobs' do
Sidekiq::Testing.fake! do
freeze_time do
diff --git a/spec/migrations/reset_job_token_scope_enabled_again_spec.rb b/spec/migrations/reset_job_token_scope_enabled_again_spec.rb
new file mode 100644
index 00000000000..da6817f6f21
--- /dev/null
+++ b/spec/migrations/reset_job_token_scope_enabled_again_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe ResetJobTokenScopeEnabledAgain do
+ let(:settings) { table(:project_ci_cd_settings) }
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project_1) { projects.create!(name: 'proj-1', path: 'gitlab-org', namespace_id: namespace.id)}
+ let(:project_2) { projects.create!(name: 'proj-2', path: 'gitlab-org', namespace_id: namespace.id)}
+
+ before do
+ settings.create!(id: 1, project_id: project_1.id, job_token_scope_enabled: true)
+ settings.create!(id: 2, project_id: project_2.id, job_token_scope_enabled: false)
+ end
+
+ it 'migrates job_token_scope_enabled to be always false' do
+ expect { migrate! }
+ .to change { settings.where(job_token_scope_enabled: false).count }
+ .from(1).to(2)
+ end
+end
diff --git a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb b/spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb
deleted file mode 100644
index 5a1c07d810f..00000000000
--- a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe ScheduleBackfillDraftStatusOnMergeRequests, :sidekiq do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
-
- let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
- let(:project) { projects.create!(namespace_id: group.id) }
-
- let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
-
- def create_merge_request(params)
- common_params = {
- target_project_id: project.id,
- target_branch: 'feature1',
- source_branch: 'master'
- }
-
- merge_requests.create!(common_params.merge(params))
- end
-
- before do
- draft_prefixes.each do |prefix|
- (1..4).each do |n|
- create_merge_request(
- title: "#{prefix} This is a title",
- draft: false,
- state_id: n
- )
- end
- end
-
- stub_const("#{described_class}::BATCH_SIZE", 1)
- end
-
- it 'schedules BackfillDraftStatusOnMergeRequests background jobs' do
- Sidekiq::Testing.fake! do
- draft_mrs = Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests::MergeRequest.eligible
-
- first_mr_id = draft_mrs.first.id
- second_mr_id = draft_mrs.second.id
-
- freeze_time do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(7)
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(2.minutes, first_mr_id, first_mr_id)
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(4.minutes, second_mr_id, second_mr_id)
- end
- end
- end
-end
diff --git a/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb b/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb
new file mode 100644
index 00000000000..012c7d065fc
--- /dev/null
+++ b/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleCopyCiBuildsColumnsToSecurityScans2 do
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:ci_pipelines) { table(:ci_pipelines) }
+ let_it_be(:ci_builds) { table(:ci_builds) }
+ let_it_be(:security_scans) { table(:security_scans) }
+ let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
+
+ let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let!(:pipeline) { ci_pipelines.create!(status: "success")}
+
+ let!(:build1) { ci_builds.create!(commit_id: pipeline.id, type: 'Ci::Build', project_id: project.id) }
+ let!(:build2) { ci_builds.create!(commit_id: pipeline.id, type: 'Ci::Build', project_id: project.id) }
+ let!(:build3) { ci_builds.create!(commit_id: pipeline.id, type: 'Ci::Build', project_id: project.id) }
+
+ let!(:scan1) { security_scans.create!(build_id: build1.id, scan_type: 1) }
+ let!(:scan2) { security_scans.create!(build_id: build2.id, scan_type: 1) }
+ let!(:scan3) { security_scans.create!(build_id: build3.id, scan_type: 1) }
+
+ let!(:job_class_name) { described_class::MIGRATION }
+ let!(:tracked_pending_job) { background_migration_jobs.create!(class_name: job_class_name, status: 0, arguments: [1]) }
+ let!(:tracked_successful_job) { background_migration_jobs.create!(class_name: job_class_name, status: 1, arguments: [2]) }
+ let(:jobs) { Gitlab::Database::BackgroundMigrationJob.where(id: [tracked_pending_job.id, tracked_successful_job.id] ).for_migration_class(job_class_name) }
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ allow_next_instance_of(Gitlab::BackgroundMigration::CopyCiBuildsColumnsToSecurityScans) do |instance|
+ allow(instance).to receive(:mark_job_as_succeeded)
+ end
+ end
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ it 'schedules background migrations', :aggregate_failures do
+ expect(jobs).not_to be_empty
+
+ migrate!
+
+ expect(jobs).to be_empty
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, scan1.id, scan2.id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, scan3.id, scan3.id)
+ end
+end
diff --git a/spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb b/spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb
new file mode 100644
index 00000000000..77f298b5ecb
--- /dev/null
+++ b/spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleRecalculateUuidOnVulnerabilitiesOccurrences3 do
+ let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:users) { table(:users) }
+ let(:user) { create_user! }
+ let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let(:vulnerability_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'uuid-v5',
+ external_id: 'uuid-v5',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'Identifier for UUIDv5')
+ end
+
+ let(:different_vulnerability_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'uuid-v4',
+ external_id: 'uuid-v4',
+ fingerprint: '772da93d34a1ba010bcb5efa9fb6f8e01bafcc89',
+ name: 'Identifier for UUIDv4')
+ end
+
+ let(:vulnerability_for_uuidv4) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let(:vulnerability_for_uuidv5) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let!(:finding1) do
+ create_finding!(
+ vulnerability_id: vulnerability_for_uuidv4.id,
+ project_id: project.id,
+ scanner_id: different_scanner.id,
+ primary_identifier_id: different_vulnerability_identifier.id,
+ location_fingerprint: 'fa18f432f1d56675f4098d318739c3cd5b14eb3e',
+ uuid: 'b3cc2518-5446-4dea-871c-89d5e999c1ac'
+ )
+ end
+
+ let!(:finding2) do
+ create_finding!(
+ vulnerability_id: vulnerability_for_uuidv5.id,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: vulnerability_identifier.id,
+ location_fingerprint: '838574be0210968bf6b9f569df9c2576242cbf0a',
+ uuid: '77211ed6-7dff-5f6b-8c9a-da89ad0a9b60'
+ )
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ it 'schedules background migrations', :aggregate_failures do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, finding1.id, finding1.id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, finding2.id, finding2.id)
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ def create_finding!(
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, location_fingerprint:, uuid:)
+ vulnerabilities_findings.create!(
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: 'test',
+ severity: 7,
+ confidence: 7,
+ report_type: 0,
+ project_fingerprint: '123qweasdzxc',
+ scanner_id: scanner_id,
+ primary_identifier_id: primary_identifier_id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: 'test',
+ raw_metadata: 'test',
+ uuid: uuid
+ )
+ end
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0
+ )
+ end
+end
diff --git a/spec/migrations/schedule_security_setting_creation_spec.rb b/spec/migrations/schedule_security_setting_creation_spec.rb
new file mode 100644
index 00000000000..e1b7b540d7f
--- /dev/null
+++ b/spec/migrations/schedule_security_setting_creation_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleSecuritySettingCreation, :sidekiq do
+ describe '#up' do
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+
+ context 'for EE version' do
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ allow(Gitlab).to receive(:ee?).and_return(true)
+ end
+
+ it 'schedules background migration job' do
+ namespace = namespaces.create!(name: 'test', path: 'test')
+ projects.create!(id: 12, namespace_id: namespace.id, name: 'red', path: 'red')
+ projects.create!(id: 13, namespace_id: namespace.id, name: 'green', path: 'green')
+ projects.create!(id: 14, namespace_id: namespace.id, name: 'blue', path: 'blue')
+
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(5.minutes, 12, 13)
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(10.minutes, 14, 14)
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+ end
+
+ context 'for FOSS version' do
+ before do
+ allow(Gitlab).to receive(:ee?).and_return(false)
+ end
+
+ it 'does not schedule any jobs' do
+ namespace = namespaces.create!(name: 'test', path: 'test')
+ projects.create!(id: 12, namespace_id: namespace.id, name: 'red', path: 'red')
+
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(0)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/alert_management/alert_spec.rb b/spec/models/alert_management/alert_spec.rb
index 18d486740b8..35398e29062 100644
--- a/spec/models/alert_management/alert_spec.rb
+++ b/spec/models/alert_management/alert_spec.rb
@@ -33,70 +33,6 @@ RSpec.describe AlertManagement::Alert do
it { is_expected.to validate_length_of(:service).is_at_most(100) }
it { is_expected.to validate_length_of(:monitoring_tool).is_at_most(100) }
- context 'when status is triggered' do
- subject { triggered_alert }
-
- context 'when ended_at is blank' do
- it { is_expected.to be_valid }
- end
-
- context 'when ended_at is present' do
- before do
- triggered_alert.ended_at = Time.current
- end
-
- it { is_expected.to be_invalid }
- end
- end
-
- context 'when status is acknowledged' do
- subject { acknowledged_alert }
-
- context 'when ended_at is blank' do
- it { is_expected.to be_valid }
- end
-
- context 'when ended_at is present' do
- before do
- acknowledged_alert.ended_at = Time.current
- end
-
- it { is_expected.to be_invalid }
- end
- end
-
- context 'when status is resolved' do
- subject { resolved_alert }
-
- context 'when ended_at is blank' do
- before do
- resolved_alert.ended_at = nil
- end
-
- it { is_expected.to be_invalid }
- end
-
- context 'when ended_at is present' do
- it { is_expected.to be_valid }
- end
- end
-
- context 'when status is ignored' do
- subject { ignored_alert }
-
- context 'when ended_at is blank' do
- it { is_expected.to be_valid }
- end
-
- context 'when ended_at is present' do
- before do
- ignored_alert.ended_at = Time.current
- end
-
- it { is_expected.to be_invalid }
- end
- end
-
describe 'fingerprint' do
let_it_be(:fingerprint) { 'fingerprint' }
let_it_be(:project3, refind: true) { create(:project) }
@@ -112,30 +48,30 @@ RSpec.describe AlertManagement::Alert do
let_it_be(:existing_alert, refind: true) { create(:alert_management_alert, fingerprint: fingerprint, project: project3) }
# We are only validating uniqueness for non-resolved alerts
- where(:existing_status, :new_status, :valid) do
- :resolved | :triggered | true
- :resolved | :acknowledged | true
- :resolved | :ignored | true
- :resolved | :resolved | true
- :triggered | :triggered | false
- :triggered | :acknowledged | false
- :triggered | :ignored | false
- :triggered | :resolved | true
- :acknowledged | :triggered | false
- :acknowledged | :acknowledged | false
- :acknowledged | :ignored | false
- :acknowledged | :resolved | true
- :ignored | :triggered | false
- :ignored | :acknowledged | false
- :ignored | :ignored | false
- :ignored | :resolved | true
+ where(:existing_status_event, :new_status, :valid) do
+ :resolve | :triggered | true
+ :resolve | :acknowledged | true
+ :resolve | :ignored | true
+ :resolve | :resolved | true
+ :trigger | :triggered | false
+ :trigger | :acknowledged | false
+ :trigger | :ignored | false
+ :trigger | :resolved | true
+ :acknowledge | :triggered | false
+ :acknowledge | :acknowledged | false
+ :acknowledge | :ignored | false
+ :acknowledge | :resolved | true
+ :ignore | :triggered | false
+ :ignore | :acknowledged | false
+ :ignore | :ignored | false
+ :ignore | :resolved | true
end
with_them do
let(:new_alert) { build(:alert_management_alert, new_status, fingerprint: fingerprint, project: project3) }
before do
- existing_alert.change_status_to(existing_status)
+ existing_alert.update!(status_event: existing_status_event)
end
if params[:valid]
@@ -196,20 +132,6 @@ RSpec.describe AlertManagement::Alert do
it { is_expected.to match_array(triggered_alert) }
end
- describe '.for_status' do
- let(:status) { :resolved }
-
- subject { AlertManagement::Alert.for_status(status) }
-
- it { is_expected.to match_array(resolved_alert) }
-
- context 'with multiple statuses' do
- let(:status) { [:resolved, :ignored] }
-
- it { is_expected.to match_array([resolved_alert, ignored_alert]) }
- end
- end
-
describe '.for_fingerprint' do
let(:fingerprint) { SecureRandom.hex }
let(:alert_with_fingerprint) { triggered_alert }
@@ -302,41 +224,7 @@ RSpec.describe AlertManagement::Alert do
end
end
- describe '.status_value' do
- using RSpec::Parameterized::TableSyntax
-
- where(:status, :status_value) do
- :triggered | 0
- :acknowledged | 1
- :resolved | 2
- :ignored | 3
- :unknown | nil
- end
-
- with_them do
- it 'returns status value by its name' do
- expect(described_class.status_value(status)).to eq(status_value)
- end
- end
- end
-
- describe '.status_name' do
- using RSpec::Parameterized::TableSyntax
-
- where(:raw_status, :status) do
- 0 | :triggered
- 1 | :acknowledged
- 2 | :resolved
- 3 | :ignored
- -1 | nil
- end
-
- with_them do
- it 'returns status name by its values' do
- expect(described_class.status_name(raw_status)).to eq(status)
- end
- end
- end
+ it_behaves_like 'a model including Escalatable'
describe '.counts_by_status' do
subject { described_class.counts_by_status }
@@ -454,85 +342,17 @@ RSpec.describe AlertManagement::Alert do
end
end
- describe '#to_reference' do
- it { expect(triggered_alert.to_reference).to eq("^alert##{triggered_alert.iid}") }
- end
-
- describe '#trigger' do
- subject { alert.trigger }
-
- context 'when alert is in triggered state' do
- let(:alert) { triggered_alert }
-
- it 'does not change the alert status' do
- expect { subject }.not_to change { alert.reload.status }
- end
- end
-
- context 'when alert not in triggered state' do
- let(:alert) { resolved_alert }
-
- it 'changes the alert status to triggered' do
- expect { subject }.to change { alert.triggered? }.to(true)
- end
-
- it 'resets ended at' do
- expect { subject }.to change { alert.reload.ended_at }.to nil
- end
- end
- end
-
- describe '#acknowledge' do
- subject { alert.acknowledge }
-
- let(:alert) { resolved_alert }
-
- it 'changes the alert status to acknowledged' do
- expect { subject }.to change { alert.acknowledged? }.to(true)
- end
-
- it 'resets ended at' do
- expect { subject }.to change { alert.reload.ended_at }.to nil
- end
- end
-
- describe '#resolve' do
- let!(:ended_at) { Time.current }
-
- subject do
- alert.ended_at = ended_at
- alert.resolve
- end
-
- context 'when alert already resolved' do
- let(:alert) { resolved_alert }
-
- it 'does not change the alert status' do
- expect { subject }.not_to change { resolved_alert.reload.status }
- end
- end
-
- context 'when alert is not resolved' do
- let(:alert) { triggered_alert }
-
- it 'changes alert status to "resolved"' do
- expect { subject }.to change { alert.resolved? }.to(true)
- end
+ describe '#open?' do
+ it 'returns true when the status is open status' do
+ expect(triggered_alert.open?).to be true
+ expect(acknowledged_alert.open?).to be true
+ expect(resolved_alert.open?).to be false
+ expect(ignored_alert.open?).to be false
end
end
- describe '#ignore' do
- subject { alert.ignore }
-
- let(:alert) { resolved_alert }
-
- it 'changes the alert status to ignored' do
- expect { subject }.to change { alert.ignored? }.to(true)
- end
-
- it 'resets ended at' do
- expect { subject }.to change { alert.reload.ended_at }.to nil
- end
+ describe '#to_reference' do
+ it { expect(triggered_alert.to_reference).to eq("^alert##{triggered_alert.iid}") }
end
describe '#register_new_event!' do
@@ -545,53 +365,20 @@ RSpec.describe AlertManagement::Alert do
end
end
- describe '#status_event_for' do
- using RSpec::Parameterized::TableSyntax
-
- where(:for_status, :event) do
- :triggered | :trigger
- 'triggered' | :trigger
- :acknowledged | :acknowledge
- 'acknowledged' | :acknowledge
- :resolved | :resolve
- 'resolved' | :resolve
- :ignored | :ignore
- 'ignored' | :ignore
- :unknown | nil
- nil | nil
- '' | nil
- 1 | nil
- end
+ describe '#resolved_at' do
+ subject { resolved_alert.resolved_at }
- with_them do
- let(:alert) { build(:alert_management_alert, project: project) }
-
- it 'returns event by status name' do
- expect(alert.status_event_for(for_status)).to eq(event)
- end
- end
+ it { is_expected.to eq(resolved_alert.ended_at) }
end
- describe '#change_status_to' do
- let_it_be_with_reload(:alert) { create(:alert_management_alert, project: project) }
+ describe '#resolved_at=' do
+ let(:resolve_time) { Time.current }
- context 'with valid statuses' do
- it 'changes the status to triggered' do
- alert.acknowledge! # change to non-triggered status
- expect { alert.change_status_to(:triggered) }.to change { alert.triggered? }.to(true)
- end
+ it 'sets ended_at' do
+ triggered_alert.resolved_at = resolve_time
- %i(acknowledged resolved ignored).each do |status|
- it "changes the status to #{status}" do
- expect { alert.change_status_to(status) }.to change { alert.public_send(:"#{status}?") }.to(true)
- end
- end
- end
-
- context 'with invalid status' do
- it 'does not change the current status' do
- expect { alert.change_status_to(nil) }.not_to change { alert.status }
- end
+ expect(triggered_alert.ended_at).to eq(resolve_time)
+ expect(triggered_alert.resolved_at).to eq(resolve_time)
end
end
end
diff --git a/spec/models/analytics/cycle_analytics/stage_event_hash_spec.rb b/spec/models/analytics/cycle_analytics/stage_event_hash_spec.rb
new file mode 100644
index 00000000000..ffddaf1e1b2
--- /dev/null
+++ b/spec/models/analytics/cycle_analytics/stage_event_hash_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Analytics::CycleAnalytics::StageEventHash, type: :model do
+ let(:stage_event_hash) { described_class.create!(hash_sha256: hash_sha256) }
+ let(:hash_sha256) { 'does_not_matter' }
+
+ describe 'associations' do
+ it { is_expected.to have_many(:cycle_analytics_project_stages) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:hash_sha256) }
+ end
+
+ describe '.record_id_by_hash_sha256' do
+ it 'returns an existing id' do
+ id = stage_event_hash.id
+ same_id = described_class.record_id_by_hash_sha256(hash_sha256)
+
+ expect(same_id).to eq(id)
+ end
+
+ it 'creates a new record' do
+ expect do
+ described_class.record_id_by_hash_sha256(hash_sha256)
+ end.to change { described_class.count }.from(0).to(1)
+ end
+ end
+
+ describe '.cleanup_if_unused' do
+ it 'removes the record' do
+ described_class.cleanup_if_unused(stage_event_hash.id)
+
+ expect(described_class.find_by_id(stage_event_hash.id)).to be_nil
+ end
+
+ it 'does not remove the record' do
+ id = create(:cycle_analytics_project_stage).stage_event_hash_id
+
+ described_class.cleanup_if_unused(id)
+
+ expect(described_class.find_by_id(id)).not_to be_nil
+ end
+ end
+end
diff --git a/spec/models/application_record_spec.rb b/spec/models/application_record_spec.rb
index 85a6717d259..f9a05c720a3 100644
--- a/spec/models/application_record_spec.rb
+++ b/spec/models/application_record_spec.rb
@@ -39,13 +39,14 @@ RSpec.describe ApplicationRecord do
let(:suggestion_attributes) { attributes_for(:suggestion).merge!(note_id: note.id) }
- describe '.safe_find_or_create_by' do
+ shared_examples '.safe_find_or_create_by' do
it 'creates the suggestion avoiding race conditions' do
- expect(Suggestion).to receive(:find_or_create_by).and_raise(ActiveRecord::RecordNotUnique)
- allow(Suggestion).to receive(:find_or_create_by).and_call_original
+ existing_suggestion = double(:Suggestion)
- expect { Suggestion.safe_find_or_create_by(suggestion_attributes) }
- .to change { Suggestion.count }.by(1)
+ expect(Suggestion).to receive(:find_by).and_return(nil, existing_suggestion)
+ expect(Suggestion).to receive(:create).and_raise(ActiveRecord::RecordNotUnique)
+
+ expect(Suggestion.safe_find_or_create_by(suggestion_attributes)).to eq(existing_suggestion)
end
it 'passes a block to find_or_create_by' do
@@ -62,10 +63,8 @@ RSpec.describe ApplicationRecord do
end
end
- describe '.safe_find_or_create_by!' do
+ shared_examples '.safe_find_or_create_by!' do
it 'creates a record using safe_find_or_create_by' do
- expect(Suggestion).to receive(:find_or_create_by).and_call_original
-
expect(Suggestion.safe_find_or_create_by!(suggestion_attributes))
.to be_a(Suggestion)
end
@@ -89,6 +88,24 @@ RSpec.describe ApplicationRecord do
.to raise_error(ActiveRecord::RecordNotFound)
end
end
+
+ context 'when optimized_safe_find_or_create_by is enabled' do
+ before do
+ stub_feature_flags(optimized_safe_find_or_create_by: true)
+ end
+
+ it_behaves_like '.safe_find_or_create_by'
+ it_behaves_like '.safe_find_or_create_by!'
+ end
+
+ context 'when optimized_safe_find_or_create_by is disabled' do
+ before do
+ stub_feature_flags(optimized_safe_find_or_create_by: false)
+ end
+
+ it_behaves_like '.safe_find_or_create_by'
+ it_behaves_like '.safe_find_or_create_by!'
+ end
end
describe '.underscore' do
@@ -105,6 +122,50 @@ RSpec.describe ApplicationRecord do
end
end
+ describe '.transaction', :delete do
+ it 'opens a new transaction' do
+ expect(described_class.connection.transaction_open?).to be false
+
+ Project.transaction do
+ expect(Project.connection.transaction_open?).to be true
+
+ Project.transaction(requires_new: true) do
+ expect(Project.connection.transaction_open?).to be true
+ end
+ end
+ end
+
+ it 'does not increment a counter when a transaction is not nested' do
+ expect(described_class.connection.transaction_open?).to be false
+
+ expect(::Gitlab::Database::Metrics)
+ .not_to receive(:subtransactions_increment)
+
+ Project.transaction do
+ expect(Project.connection.transaction_open?).to be true
+ end
+
+ Project.transaction(requires_new: true) do
+ expect(Project.connection.transaction_open?).to be true
+ end
+ end
+
+ it 'increments a counter when a nested transaction is created' do
+ expect(described_class.connection.transaction_open?).to be false
+
+ expect(::Gitlab::Database::Metrics)
+ .to receive(:subtransactions_increment)
+ .with('Project')
+ .once
+
+ Project.transaction do
+ Project.transaction(requires_new: true) do
+ expect(Project.connection.transaction_open?).to be true
+ end
+ end
+ end
+ end
+
describe '.with_fast_read_statement_timeout' do
context 'when the query runs faster than configured timeout' do
it 'executes the query without error' do
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 80471a09bbd..e9c5ffef210 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -218,6 +218,16 @@ RSpec.describe ApplicationSetting do
end
end
+ describe 'default_branch_name validations' do
+ context "when javascript tags get sanitized properly" do
+ it "gets sanitized properly" do
+ setting.update!(default_branch_name: "hello<script>alert(1)</script>")
+
+ expect(setting.default_branch_name).to eq('hello')
+ end
+ end
+ end
+
describe 'spam_check_endpoint' do
context 'when spam_check_endpoint is enabled' do
before do
@@ -834,6 +844,23 @@ RSpec.describe ApplicationSetting do
end
end
+ describe '#customers_dot_jwt_signing_key' do
+ it { is_expected.not_to allow_value('').for(:customers_dot_jwt_signing_key) }
+ it { is_expected.not_to allow_value('invalid RSA key').for(:customers_dot_jwt_signing_key) }
+ it { is_expected.to allow_value(nil).for(:customers_dot_jwt_signing_key) }
+ it { is_expected.to allow_value(OpenSSL::PKey::RSA.new(1024).to_pem).for(:customers_dot_jwt_signing_key) }
+
+ it 'is encrypted' do
+ subject.customers_dot_jwt_signing_key = OpenSSL::PKey::RSA.new(1024).to_pem
+
+ aggregate_failures do
+ expect(subject.encrypted_customers_dot_jwt_signing_key).to be_present
+ expect(subject.encrypted_customers_dot_jwt_signing_key_iv).to be_present
+ expect(subject.encrypted_customers_dot_jwt_signing_key).not_to eq(subject.customers_dot_jwt_signing_key)
+ end
+ end
+ end
+
describe '#cloud_license_auth_token' do
it { is_expected.to allow_value(nil).for(:cloud_license_auth_token) }
@@ -927,7 +954,7 @@ RSpec.describe ApplicationSetting do
context 'when ApplicationSettings does not have a primary key' do
before do
- allow(ActiveRecord::Base.connection).to receive(:primary_key).with(described_class.table_name).and_return(nil)
+ allow(described_class.connection).to receive(:primary_key).with(described_class.table_name).and_return(nil)
end
it 'raises an exception' do
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 0c344270e0b..26abc98656e 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -20,7 +20,6 @@ RSpec.describe Ci::Build do
it { is_expected.to belong_to(:trigger_request) }
it { is_expected.to belong_to(:erased_by) }
- it { is_expected.to have_many(:trace_sections) }
it { is_expected.to have_many(:needs) }
it { is_expected.to have_many(:sourced_pipelines) }
it { is_expected.to have_many(:job_variables) }
@@ -29,6 +28,7 @@ RSpec.describe Ci::Build do
it { is_expected.to have_one(:deployment) }
it { is_expected.to have_one(:runner_session) }
+ it { is_expected.to have_one(:trace_metadata) }
it { is_expected.to validate_presence_of(:ref) }
@@ -345,12 +345,9 @@ RSpec.describe Ci::Build do
end
describe '#stick_build_if_status_changed' do
- it 'sticks the build if the status changed' do
+ it 'sticks the build if the status changed', :db_load_balancing do
job = create(:ci_build, :pending)
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?)
- .and_return(true)
-
expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:stick)
.with(:build, job.id)
@@ -1105,17 +1102,6 @@ RSpec.describe Ci::Build do
end
end
- describe '#parse_trace_sections!' do
- it 'calls ExtractSectionsFromBuildTraceService' do
- expect(Ci::ExtractSectionsFromBuildTraceService)
- .to receive(:new).with(project, build.user).once.and_call_original
- expect_any_instance_of(Ci::ExtractSectionsFromBuildTraceService)
- .to receive(:execute).with(build).once
-
- build.parse_trace_sections!
- end
- end
-
describe '#trace' do
subject { build.trace }
@@ -1955,17 +1941,7 @@ RSpec.describe Ci::Build do
described_class.retry(build, user)
end
- context 'when prevent_retry_of_retried_jobs feature flag is enabled' do
- it { is_expected.not_to be_retryable }
- end
-
- context 'when prevent_retry_of_retried_jobs feature flag is disabled' do
- before do
- stub_feature_flags(prevent_retry_of_retried_jobs: false)
- end
-
- it { is_expected.to be_retryable }
- end
+ it { is_expected.not_to be_retryable }
end
end
end
@@ -2214,34 +2190,12 @@ RSpec.describe Ci::Build do
expect(build.options['image']).to be_nil
end
- context 'when ci_build_metadata_config is set' do
- before do
- stub_feature_flags(ci_build_metadata_config: true)
- end
-
- it 'persist data in build metadata' do
- expect(build.metadata.read_attribute(:config_options)).to eq(options.symbolize_keys)
- end
-
- it 'does not persist data in build' do
- expect(build.read_attribute(:options)).to be_nil
- end
+ it 'persist data in build metadata' do
+ expect(build.metadata.read_attribute(:config_options)).to eq(options.symbolize_keys)
end
- context 'when ci_build_metadata_config is disabled' do
- let(:build) { create(:ci_build, pipeline: pipeline) }
-
- before do
- stub_feature_flags(ci_build_metadata_config: false)
- end
-
- it 'persist data in build' do
- expect(build.read_attribute(:options)).to eq(options.symbolize_keys)
- end
-
- it 'does not persist data in build metadata' do
- expect(build.metadata.read_attribute(:config_options)).to be_nil
- end
+ it 'does not persist data in build' do
+ expect(build.read_attribute(:options)).to be_nil
end
context 'when options include artifacts:expose_as' do
@@ -2668,6 +2622,7 @@ RSpec.describe Ci::Build do
{ key: 'CI_PROJECT_URL', value: project.web_url, public: true, masked: false },
{ key: 'CI_PROJECT_VISIBILITY', value: 'private', public: true, masked: false },
{ key: 'CI_PROJECT_REPOSITORY_LANGUAGES', value: project.repository_languages.map(&:name).join(',').downcase, public: true, masked: false },
+ { key: 'CI_PROJECT_CLASSIFICATION_LABEL', value: project.external_authorization_classification_label, public: true, masked: false },
{ key: 'CI_DEFAULT_BRANCH', value: project.default_branch, public: true, masked: false },
{ key: 'CI_CONFIG_PATH', value: project.ci_config_path_or_default, public: true, masked: false },
{ key: 'CI_PAGES_DOMAIN', value: Gitlab.config.pages.host, public: true, masked: false },
@@ -3195,6 +3150,17 @@ RSpec.describe Ci::Build do
end
context 'when container registry is enabled' do
+ let_it_be_with_reload(:project) { create(:project, :public, :repository, group: group) }
+
+ let_it_be_with_reload(:pipeline) do
+ create(:ci_pipeline, project: project,
+ sha: project.commit.id,
+ ref: project.default_branch,
+ status: 'success')
+ end
+
+ let_it_be_with_refind(:build) { create(:ci_build, pipeline: pipeline) }
+
let(:container_registry_enabled) { true }
let(:ci_registry) do
{ key: 'CI_REGISTRY', value: 'registry.example.com', public: true, masked: false }
@@ -3206,7 +3172,7 @@ RSpec.describe Ci::Build do
context 'and is disabled for project' do
before do
- project.update!(container_registry_enabled: false)
+ project.project_feature.update_column(:container_registry_access_level, ProjectFeature::DISABLED)
end
it { is_expected.to include(ci_registry) }
@@ -3215,7 +3181,16 @@ RSpec.describe Ci::Build do
context 'and is enabled for project' do
before do
- project.update!(container_registry_enabled: true)
+ project.project_feature.update_column(:container_registry_access_level, ProjectFeature::ENABLED)
+ end
+
+ it { is_expected.to include(ci_registry) }
+ it { is_expected.to include(ci_registry_image) }
+ end
+
+ context 'and is private for project' do
+ before do
+ project.project_feature.update_column(:container_registry_access_level, ProjectFeature::PRIVATE)
end
it { is_expected.to include(ci_registry) }
@@ -3613,36 +3588,14 @@ RSpec.describe Ci::Build do
end
end
- context 'when ci_build_metadata_config is set' do
- before do
- stub_feature_flags(ci_build_metadata_config: true)
- end
-
- it_behaves_like 'having consistent representation'
-
- it 'persist data in build metadata' do
- expect(build.metadata.read_attribute(:config_variables)).not_to be_nil
- end
+ it_behaves_like 'having consistent representation'
- it 'does not persist data in build' do
- expect(build.read_attribute(:yaml_variables)).to be_nil
- end
+ it 'persist data in build metadata' do
+ expect(build.metadata.read_attribute(:config_variables)).not_to be_nil
end
- context 'when ci_build_metadata_config is disabled' do
- before do
- stub_feature_flags(ci_build_metadata_config: false)
- end
-
- it_behaves_like 'having consistent representation'
-
- it 'persist data in build' do
- expect(build.read_attribute(:yaml_variables)).not_to be_nil
- end
-
- it 'does not persist data in build metadata' do
- expect(build.metadata.read_attribute(:config_variables)).to be_nil
- end
+ it 'does not persist data in build' do
+ expect(build.read_attribute(:yaml_variables)).to be_nil
end
end
@@ -3727,7 +3680,7 @@ RSpec.describe Ci::Build do
it 'ensures that it is not run in database transaction' do
expect(job.pipeline.persistent_ref).to receive(:create) do
- expect(Gitlab::Database).not_to be_inside_transaction
+ expect(Gitlab::Database.main).not_to be_inside_transaction
end
run_job_without_exception
@@ -3792,7 +3745,21 @@ RSpec.describe Ci::Build do
context 'when artifacts of depended job has been expired' do
let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
- it { expect(job).not_to have_valid_build_dependencies }
+ context 'when pipeline is not locked' do
+ before do
+ build.pipeline.unlocked!
+ end
+
+ it { expect(job).not_to have_valid_build_dependencies }
+ end
+
+ context 'when pipeline is locked' do
+ before do
+ build.pipeline.artifacts_locked!
+ end
+
+ it { expect(job).to have_valid_build_dependencies }
+ end
end
context 'when artifacts of depended job has been erased' do
@@ -4788,51 +4755,21 @@ RSpec.describe Ci::Build do
subject { build.send(:write_metadata_attribute, :options, :config_options, options) }
- context 'when ci_build_metadata_config is set' do
+ context 'when data in build is already set' do
before do
- stub_feature_flags(ci_build_metadata_config: true)
+ build.write_attribute(:options, existing_options)
end
- context 'when data in build is already set' do
- before do
- build.write_attribute(:options, existing_options)
- end
-
- it 'does set metadata options' do
- subject
-
- expect(build.metadata.read_attribute(:config_options)).to eq(options)
- end
-
- it 'does reset build options' do
- subject
-
- expect(build.read_attribute(:options)).to be_nil
- end
- end
- end
+ it 'does set metadata options' do
+ subject
- context 'when ci_build_metadata_config is disabled' do
- before do
- stub_feature_flags(ci_build_metadata_config: false)
+ expect(build.metadata.read_attribute(:config_options)).to eq(options)
end
- context 'when data in build metadata is already set' do
- before do
- build.ensure_metadata.write_attribute(:config_options, existing_options)
- end
-
- it 'does set metadata options' do
- subject
-
- expect(build.read_attribute(:options)).to eq(options)
- end
-
- it 'does reset build options' do
- subject
+ it 'does reset build options' do
+ subject
- expect(build.metadata.read_attribute(:config_options)).to be_nil
- end
+ expect(build.read_attribute(:options)).to be_nil
end
end
end
@@ -4842,8 +4779,24 @@ RSpec.describe Ci::Build do
let!(:pre_stage_job_invalid) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test2', stage_idx: 1) }
let!(:job) { create(:ci_build, :pending, pipeline: pipeline, stage_idx: 2, options: { dependencies: %w(test1 test2) }) }
- it 'returns invalid dependencies' do
- expect(job.invalid_dependencies).to eq([pre_stage_job_invalid])
+ context 'when pipeline is locked' do
+ before do
+ build.pipeline.unlocked!
+ end
+
+ it 'returns invalid dependencies when expired' do
+ expect(job.invalid_dependencies).to eq([pre_stage_job_invalid])
+ end
+ end
+
+ context 'when pipeline is not locked' do
+ before do
+ build.pipeline.artifacts_locked!
+ end
+
+ it 'returns no invalid dependencies when expired' do
+ expect(job.invalid_dependencies).to eq([])
+ end
end
end
@@ -5267,6 +5220,14 @@ RSpec.describe Ci::Build do
end
end
+ describe '.with_project_and_metadata' do
+ it 'does not join across databases' do
+ with_cross_joins_prevented do
+ ::Ci::Build.with_project_and_metadata.to_a
+ end
+ end
+ end
+
describe '.without_coverage' do
let!(:build_with_coverage) { create(:ci_build, pipeline: pipeline, coverage: 100.0) }
diff --git a/spec/models/ci/build_trace_metadata_spec.rb b/spec/models/ci/build_trace_metadata_spec.rb
new file mode 100644
index 00000000000..42b9d5d34b6
--- /dev/null
+++ b/spec/models/ci/build_trace_metadata_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::BuildTraceMetadata do
+ it { is_expected.to belong_to(:build) }
+ it { is_expected.to belong_to(:trace_artifact) }
+
+ it { is_expected.to validate_presence_of(:build) }
+end
diff --git a/spec/models/ci/build_trace_section_name_spec.rb b/spec/models/ci/build_trace_section_name_spec.rb
deleted file mode 100644
index b220e67d48e..00000000000
--- a/spec/models/ci/build_trace_section_name_spec.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::BuildTraceSectionName, model: true do
- subject { build(:ci_build_trace_section_name) }
-
- it { is_expected.to belong_to(:project) }
- it { is_expected.to have_many(:trace_sections)}
-
- it { is_expected.to validate_presence_of(:project) }
- it { is_expected.to validate_presence_of(:name) }
- it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id) }
-end
diff --git a/spec/models/ci/build_trace_section_spec.rb b/spec/models/ci/build_trace_section_spec.rb
deleted file mode 100644
index 640bd202b3a..00000000000
--- a/spec/models/ci/build_trace_section_spec.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::BuildTraceSection, model: true do
- it { is_expected.to belong_to(:build)}
- it { is_expected.to belong_to(:project)}
- it { is_expected.to belong_to(:section_name)}
-
- it { is_expected.to validate_presence_of(:section_name) }
- it { is_expected.to validate_presence_of(:build) }
- it { is_expected.to validate_presence_of(:project) }
-end
diff --git a/spec/models/ci/build_trace_spec.rb b/spec/models/ci/build_trace_spec.rb
index 3beca0565c6..bd24e8be1ac 100644
--- a/spec/models/ci/build_trace_spec.rb
+++ b/spec/models/ci/build_trace_spec.rb
@@ -32,4 +32,14 @@ RSpec.describe Ci::BuildTrace do
{ offset: 0, content: [{ text: 'the-stream' }] }
])
end
+
+ context 'with invalid UTF-8 data' do
+ let(:data) { StringIO.new("UTF-8 dashes here: ───\n🐤🐤🐤🐤\xF0\x9F\x90\n") }
+
+ it 'returns valid UTF-8 data', :aggregate_failures do
+ expect(subject.lines[0]).to eq({ offset: 0, content: [{ text: 'UTF-8 dashes here: ───' }] } )
+ # Each of the dashes is 3 bytes, so we get 19 + 9 + 1 = 29
+ expect(subject.lines[1]).to eq({ offset: 29, content: [{ text: '🐤🐤🐤🐤�' }] } )
+ end
+ end
end
diff --git a/spec/models/ci/pending_build_spec.rb b/spec/models/ci/pending_build_spec.rb
index b64f3999232..0518c9a1652 100644
--- a/spec/models/ci/pending_build_spec.rb
+++ b/spec/models/ci/pending_build_spec.rb
@@ -8,6 +8,34 @@ RSpec.describe Ci::PendingBuild do
let(:build) { create(:ci_build, :created, pipeline: pipeline) }
+ describe 'associations' do
+ it { is_expected.to belong_to :project }
+ it { is_expected.to belong_to :build }
+ it { is_expected.to belong_to :namespace }
+ end
+
+ describe 'scopes' do
+ describe '.with_instance_runners' do
+ subject(:pending_builds) { described_class.with_instance_runners }
+
+ let!(:pending_build_1) { create(:ci_pending_build, instance_runners_enabled: false) }
+
+ context 'when pending builds cannot be picked up by runner' do
+ it 'returns an empty collection of pending builds' do
+ expect(pending_builds).to be_empty
+ end
+ end
+
+ context 'when pending builds can be picked up by runner' do
+ let!(:pending_build_2) { create(:ci_pending_build) }
+
+ it 'returns matching pending builds' do
+ expect(pending_builds).to contain_exactly(pending_build_2)
+ end
+ end
+ end
+ end
+
describe '.upsert_from_build!' do
context 'another pending entry does not exist' do
it 'creates a new pending entry' do
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 74a476a6422..da89eccc3b2 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -263,6 +263,20 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '.with_pipeline_source' do
+ subject { described_class.with_pipeline_source(source) }
+
+ let(:source) { 'web' }
+
+ let_it_be(:push_pipeline) { create(:ci_pipeline, source: :push) }
+ let_it_be(:web_pipeline) { create(:ci_pipeline, source: :web) }
+ let_it_be(:api_pipeline) { create(:ci_pipeline, source: :api) }
+
+ it 'contains pipelines created due to specified source' do
+ expect(subject).to contain_exactly(web_pipeline)
+ end
+ end
+
describe '.ci_sources' do
subject { described_class.ci_sources }
@@ -2263,18 +2277,38 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe '.latest_successful_for_refs' do
- let!(:latest_successful_pipeline1) do
- create_pipeline(:success, 'ref1', 'D')
- end
+ subject(:latest_successful_for_refs) { described_class.latest_successful_for_refs(refs) }
- let!(:latest_successful_pipeline2) do
- create_pipeline(:success, 'ref2', 'D')
+ context 'when refs are specified' do
+ let(:refs) { %w(first_ref second_ref third_ref) }
+
+ before do
+ create(:ci_empty_pipeline, id: 1001, status: :success, ref: 'first_ref', sha: 'sha')
+ create(:ci_empty_pipeline, id: 1002, status: :success, ref: 'second_ref', sha: 'sha')
+ end
+
+ let!(:latest_successful_pipeline_for_first_ref) do
+ create(:ci_empty_pipeline, id: 2001, status: :success, ref: 'first_ref', sha: 'sha')
+ end
+
+ let!(:latest_successful_pipeline_for_second_ref) do
+ create(:ci_empty_pipeline, id: 2002, status: :success, ref: 'second_ref', sha: 'sha')
+ end
+
+ it 'returns the latest successful pipeline for both refs' do
+ expect(latest_successful_for_refs).to eq({
+ 'first_ref' => latest_successful_pipeline_for_first_ref,
+ 'second_ref' => latest_successful_pipeline_for_second_ref
+ })
+ end
end
- it 'returns the latest successful pipeline for both refs' do
- refs = %w(ref1 ref2 ref3)
+ context 'when no refs are specified' do
+ let(:refs) { [] }
- expect(described_class.latest_successful_for_refs(refs)).to eq({ 'ref1' => latest_successful_pipeline1, 'ref2' => latest_successful_pipeline2 })
+ it 'returns an empty relation when no refs are specified' do
+ expect(latest_successful_for_refs).to be_empty
+ end
end
end
end
diff --git a/spec/models/ci/resource_spec.rb b/spec/models/ci/resource_spec.rb
index 5574f6f82b2..e883d704768 100644
--- a/spec/models/ci/resource_spec.rb
+++ b/spec/models/ci/resource_spec.rb
@@ -15,6 +15,22 @@ RSpec.describe Ci::Resource do
end
end
+ describe '.retained' do
+ subject { described_class.retained }
+
+ it "returns the resource if it's retained" do
+ resource = create(:ci_resource, processable: create(:ci_build))
+
+ is_expected.to eq([resource])
+ end
+
+ it "returns empty if it's not retained" do
+ create(:ci_resource, processable: nil)
+
+ is_expected.to be_empty
+ end
+ end
+
describe '.retained_by' do
subject { described_class.retained_by(build) }
@@ -25,4 +41,40 @@ RSpec.describe Ci::Resource do
is_expected.to eq([resource])
end
end
+
+ describe '.stale_processables' do
+ subject { resource_group.resources.stale_processables }
+
+ let!(:resource_group) { create(:ci_resource_group) }
+ let!(:resource) { create(:ci_resource, processable: build, resource_group: resource_group) }
+
+ context 'when the processable is running' do
+ let!(:build) { create(:ci_build, :running, resource_group: resource_group) }
+
+ before do
+ # Creating unrelated builds to make sure the `retained` scope is working
+ create(:ci_build, :running, resource_group: resource_group)
+ end
+
+ it 'returns empty' do
+ is_expected.to be_empty
+ end
+
+ context 'and doomed' do
+ before do
+ build.doom!
+ end
+
+ it 'returns empty' do
+ is_expected.to be_empty
+ end
+
+ it 'returns the stale processable a few minutes later' do
+ travel_to(10.minutes.since) do
+ is_expected.to eq([build])
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/ci/runner_namespace_spec.rb b/spec/models/ci/runner_namespace_spec.rb
index 41d805adb9f..4e7cf7a3cb3 100644
--- a/spec/models/ci/runner_namespace_spec.rb
+++ b/spec/models/ci/runner_namespace_spec.rb
@@ -4,6 +4,12 @@ require 'spec_helper'
RSpec.describe Ci::RunnerNamespace do
it_behaves_like 'includes Limitable concern' do
+ before do
+ skip_default_enabled_yaml_check
+
+ stub_feature_flags(ci_runner_limits_override: false)
+ end
+
subject { build(:ci_runner_namespace, group: create(:group, :nested), runner: create(:ci_runner, :group)) }
end
end
diff --git a/spec/models/ci/runner_project_spec.rb b/spec/models/ci/runner_project_spec.rb
index 13369dba2cf..fef1416a84a 100644
--- a/spec/models/ci/runner_project_spec.rb
+++ b/spec/models/ci/runner_project_spec.rb
@@ -4,6 +4,12 @@ require 'spec_helper'
RSpec.describe Ci::RunnerProject do
it_behaves_like 'includes Limitable concern' do
+ before do
+ skip_default_enabled_yaml_check
+
+ stub_feature_flags(ci_runner_limits_override: false)
+ end
+
subject { build(:ci_runner_project, project: create(:project), runner: create(:ci_runner, :project)) }
end
end
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 69b4d752f4c..a951af4cc4f 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -79,6 +79,15 @@ RSpec.describe CommitStatus do
end
end
+ describe '.updated_at_before' do
+ it 'finds the relevant records' do
+ status = create(:commit_status, updated_at: 1.day.ago, project: project)
+ create(:commit_status, updated_at: 1.day.since, project: project)
+
+ expect(described_class.updated_at_before(Time.current)).to eq([status])
+ end
+ end
+
describe '.updated_before' do
let!(:lookback) { 5.days.ago }
let!(:timeout) { 1.day.ago }
diff --git a/spec/models/concerns/case_sensitivity_spec.rb b/spec/models/concerns/case_sensitivity_spec.rb
index 7cf7b825d7d..269f9577267 100644
--- a/spec/models/concerns/case_sensitivity_spec.rb
+++ b/spec/models/concerns/case_sensitivity_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe CaseSensitivity do
describe '.iwhere' do
- let_it_be(:connection) { ActiveRecord::Base.connection }
+ let_it_be(:connection) { Namespace.connection }
let_it_be(:model) do
Class.new(ActiveRecord::Base) do
include CaseSensitivity
diff --git a/spec/models/concerns/ci/has_status_spec.rb b/spec/models/concerns/ci/has_status_spec.rb
index b16420bc658..0709a050056 100644
--- a/spec/models/concerns/ci/has_status_spec.rb
+++ b/spec/models/concerns/ci/has_status_spec.rb
@@ -351,6 +351,18 @@ RSpec.describe Ci::HasStatus do
it_behaves_like 'not containing the job', status
end
end
+
+ describe '.complete' do
+ subject { CommitStatus.complete }
+
+ described_class::COMPLETED_STATUSES.each do |status|
+ it_behaves_like 'containing the job', status
+ end
+
+ described_class::ACTIVE_STATUSES.each do |status|
+ it_behaves_like 'not containing the job', status
+ end
+ end
end
describe '::DEFAULT_STATUS' do
diff --git a/spec/models/concerns/each_batch_spec.rb b/spec/models/concerns/each_batch_spec.rb
index 5f4e5d4bd98..f1fb4fcbd03 100644
--- a/spec/models/concerns/each_batch_spec.rb
+++ b/spec/models/concerns/each_batch_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe EachBatch do
include EachBatch
self.table_name = 'users'
+
+ scope :never_signed_in, -> { where(sign_in_count: 0) }
end
end
@@ -72,5 +74,16 @@ RSpec.describe EachBatch do
expect(ids).to eq(ids.sort.reverse)
end
+
+ describe 'current scope' do
+ let(:entry) { create(:user, sign_in_count: 1) }
+ let(:ids_with_new_relation) { model.where(id: entry.id).pluck(:id) }
+
+ it 'does not leak current scope to block being executed' do
+ model.never_signed_in.each_batch(of: 5) do |relation|
+ expect(ids_with_new_relation).to include(entry.id)
+ end
+ end
+ end
end
end
diff --git a/spec/models/concerns/has_integrations_spec.rb b/spec/models/concerns/has_integrations_spec.rb
index 6b3f75bfcfd..ea6b0e69209 100644
--- a/spec/models/concerns/has_integrations_spec.rb
+++ b/spec/models/concerns/has_integrations_spec.rb
@@ -17,14 +17,6 @@ RSpec.describe HasIntegrations do
create(:integrations_slack, project: project_4, inherit_from_id: nil)
end
- describe '.with_custom_integration_for' do
- it 'returns projects with custom integrations' do
- # We use pagination to verify that the group is excluded from the query
- expect(Project.with_custom_integration_for(instance_integration, 0, 2)).to contain_exactly(project_2, project_3)
- expect(Project.with_custom_integration_for(instance_integration)).to contain_exactly(project_2, project_3)
- end
- end
-
describe '.without_integration' do
it 'returns projects without integration' do
expect(Project.without_integration(instance_integration)).to contain_exactly(project_4)
diff --git a/spec/models/concerns/limitable_spec.rb b/spec/models/concerns/limitable_spec.rb
index 6b25ed39efb..850282d54c7 100644
--- a/spec/models/concerns/limitable_spec.rb
+++ b/spec/models/concerns/limitable_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
-require 'active_model'
+require 'spec_helper'
RSpec.describe Limitable do
let(:minimal_test_class) do
@@ -17,7 +16,7 @@ RSpec.describe Limitable do
end
before do
- stub_const("MinimalTestClass", minimal_test_class)
+ stub_const('MinimalTestClass', minimal_test_class)
end
it { expect(MinimalTestClass.limit_name).to eq('test_classes') }
@@ -37,25 +36,50 @@ RSpec.describe Limitable do
instance.valid?(:create)
end
- context 'with custom relation' do
- before do
- MinimalTestClass.limit_relation = :custom_relation
+ context 'with custom relation and feature flags' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:limit_feature_flag, :limit_feature_flag_value, :limit_feature_flag_for_override, :limit_feature_flag_override_value, :expect_limit_applied?) do
+ nil | nil | nil | nil | true
+ :some_feature_flag | false | nil | nil | false
+ :some_feature_flag | true | nil | nil | true
+ :some_feature_flag | true | :some_feature_flag_disable | false | true
+ :some_feature_flag | false | :some_feature_flag_disable | false | false
+ :some_feature_flag | false | :some_feature_flag_disable | true | false
+ :some_feature_flag | true | :some_feature_flag_disable | true | false
end
- it 'triggers custom limit_relation' do
- instance = MinimalTestClass.new
+ with_them do
+ let(:instance) { MinimalTestClass.new }
- def instance.project
- @project ||= Object.new
- end
+ before do
+ def instance.project
+ @project ||= stub_feature_flag_gate('CustomActor')
+ end
+
+ stub_feature_flags("#{limit_feature_flag}": limit_feature_flag_value ? [instance.project] : false) if limit_feature_flag
+ stub_feature_flags("#{limit_feature_flag_for_override}": limit_feature_flag_override_value ? [instance.project] : false) if limit_feature_flag_for_override
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
- limits = Object.new
- custom_relation = Object.new
- expect(instance).to receive(:custom_relation).and_return(custom_relation)
- expect(instance.project).to receive(:actual_limits).and_return(limits)
- expect(limits).to receive(:exceeded?).with(instance.class.name.demodulize.tableize, custom_relation).and_return(false)
+ MinimalTestClass.limit_relation = :custom_relation
+ MinimalTestClass.limit_feature_flag = limit_feature_flag
+ MinimalTestClass.limit_feature_flag_for_override = limit_feature_flag_for_override
+ end
- instance.valid?(:create)
+ it 'acts according to the feature flag settings' do
+ limits = Object.new
+ custom_relation = Object.new
+ if expect_limit_applied?
+ expect(instance).to receive(:custom_relation).and_return(custom_relation)
+ expect(instance.project).to receive(:actual_limits).and_return(limits)
+ expect(limits).to receive(:exceeded?).with(instance.class.name.demodulize.tableize, custom_relation).and_return(false)
+ else
+ expect(instance).not_to receive(:custom_relation)
+ end
+
+ instance.valid?(:create)
+ end
end
end
end
diff --git a/spec/models/concerns/sortable_spec.rb b/spec/models/concerns/sortable_spec.rb
index cfa00bab025..f1ae89f33af 100644
--- a/spec/models/concerns/sortable_spec.rb
+++ b/spec/models/concerns/sortable_spec.rb
@@ -70,8 +70,8 @@ RSpec.describe Sortable do
it 'ascending' do
expect(relation).to receive(:reorder).once.and_call_original
- table = Regexp.escape(ActiveRecord::Base.connection.quote_table_name(:namespaces))
- column = Regexp.escape(ActiveRecord::Base.connection.quote_column_name(:name))
+ table = Regexp.escape(ApplicationRecord.connection.quote_table_name(:namespaces))
+ column = Regexp.escape(ApplicationRecord.connection.quote_column_name(:name))
sql = relation.order_by('name_asc').to_sql
@@ -81,8 +81,8 @@ RSpec.describe Sortable do
it 'descending' do
expect(relation).to receive(:reorder).once.and_call_original
- table = Regexp.escape(ActiveRecord::Base.connection.quote_table_name(:namespaces))
- column = Regexp.escape(ActiveRecord::Base.connection.quote_column_name(:name))
+ table = Regexp.escape(ApplicationRecord.connection.quote_table_name(:namespaces))
+ column = Regexp.escape(ApplicationRecord.connection.quote_column_name(:name))
sql = relation.order_by('name_desc').to_sql
diff --git a/spec/models/concerns/spammable_spec.rb b/spec/models/concerns/spammable_spec.rb
index 3c5f3b2d2ad..5edaab56e2d 100644
--- a/spec/models/concerns/spammable_spec.rb
+++ b/spec/models/concerns/spammable_spec.rb
@@ -28,11 +28,11 @@ RSpec.describe Spammable do
it 'returns true for public project' do
issue.project.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
- expect(issue.check_for_spam?).to eq(true)
+ expect(issue.check_for_spam?(user: issue.author)).to eq(true)
end
it 'returns false for other visibility levels' do
- expect(issue.check_for_spam?).to eq(false)
+ expect(issue.check_for_spam?(user: issue.author)).to eq(false)
end
end
diff --git a/spec/models/concerns/strip_attribute_spec.rb b/spec/models/concerns/strip_attribute_spec.rb
index 812f0a015f7..4357bc93361 100644
--- a/spec/models/concerns/strip_attribute_spec.rb
+++ b/spec/models/concerns/strip_attribute_spec.rb
@@ -5,12 +5,12 @@ require 'spec_helper'
RSpec.describe StripAttribute do
let(:milestone) { create(:milestone) }
- describe ".strip_attributes" do
- it { expect(Milestone).to respond_to(:strip_attributes) }
+ describe ".strip_attributes!" do
+ it { expect(Milestone).to respond_to(:strip_attributes!) }
it { expect(Milestone.strip_attrs).to include(:title) }
end
- describe "#strip_attributes" do
+ describe "#strip_attributes!" do
before do
milestone.title = ' 8.3 '
milestone.valid?
diff --git a/spec/models/concerns/vulnerability_finding_signature_helpers_spec.rb b/spec/models/concerns/vulnerability_finding_signature_helpers_spec.rb
new file mode 100644
index 00000000000..0a71699971e
--- /dev/null
+++ b/spec/models/concerns/vulnerability_finding_signature_helpers_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe VulnerabilityFindingSignatureHelpers do
+ let(:cls) do
+ Class.new do
+ include VulnerabilityFindingSignatureHelpers
+ attr_accessor :algorithm_type
+
+ def initialize(algorithm_type)
+ @algorithm_type = algorithm_type
+ end
+ end
+ end
+
+ describe '#priority' do
+ it 'returns numeric values of the priority string' do
+ expect(cls.new('scope_offset').priority).to eq(3)
+ expect(cls.new('location').priority).to eq(2)
+ expect(cls.new('hash').priority).to eq(1)
+ end
+ end
+
+ describe '#self.priority' do
+ it 'returns the numeric value of the provided string' do
+ expect(cls.priority('scope_offset')).to eq(3)
+ expect(cls.priority('location')).to eq(2)
+ expect(cls.priority('hash')).to eq(1)
+ end
+ end
+end
diff --git a/spec/models/concerns/where_composite_spec.rb b/spec/models/concerns/where_composite_spec.rb
index fb23e6bfe1d..5e67f2f5b65 100644
--- a/spec/models/concerns/where_composite_spec.rb
+++ b/spec/models/concerns/where_composite_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe WhereComposite do
let(:model) do
tbl_name = test_table_name
- Class.new(ActiveRecord::Base) do
+ Class.new(ApplicationRecord) do
self.table_name = tbl_name
include WhereComposite
@@ -16,7 +16,7 @@ RSpec.describe WhereComposite do
end
def connection
- ActiveRecord::Base.connection
+ ApplicationRecord.connection
end
before_all do
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index a53db07cc59..846dfb30928 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -323,7 +323,7 @@ RSpec.describe ContainerRepository do
context 'with a subgroup' do
let_it_be(:test_group) { create(:group) }
let_it_be(:another_project) { create(:project, path: 'test', group: test_group) }
- let_it_be(:project3) { create(:project, path: 'test3', group: test_group, container_registry_enabled: false) }
+ let_it_be(:project3) { create(:project, :container_registry_disabled, path: 'test3', group: test_group) }
let_it_be(:another_repository) do
create(:container_repository, name: 'my_image', project: another_project)
diff --git a/spec/models/customer_relations/organization_spec.rb b/spec/models/customer_relations/organization_spec.rb
new file mode 100644
index 00000000000..b79b5748156
--- /dev/null
+++ b/spec/models/customer_relations/organization_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CustomerRelations::Organization, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:group).with_foreign_key('group_id') }
+ end
+
+ describe 'validations' do
+ subject { create(:organization) }
+
+ it { is_expected.to validate_presence_of(:group) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_uniqueness_of(:name).case_insensitive.scoped_to([:group_id]) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_length_of(:description).is_at_most(1024) }
+ end
+
+ describe '#name' do
+ it 'strips name' do
+ organization = described_class.new(name: ' GitLab ')
+ organization.valid?
+
+ expect(organization.name).to eq('GitLab')
+ end
+ end
+
+ describe '#find_by_name' do
+ let!(:group) { create(:group) }
+ let!(:organization1) { create(:organization, group: group, name: 'Test') }
+ let!(:organization2) { create(:organization, group: create(:group), name: 'Test') }
+
+ it 'finds organizations in the given group by name, case-insensitively' do
+ expect(described_class.find_by_name(group.id, 'TEST')).to eq([organization1])
+ end
+ end
+end
diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb
index c9f7895a616..88451307efb 100644
--- a/spec/models/deploy_token_spec.rb
+++ b/spec/models/deploy_token_spec.rb
@@ -22,6 +22,32 @@ RSpec.describe DeployToken do
it { is_expected.to validate_presence_of(:deploy_token_type) }
end
+ shared_examples 'invalid group deploy token' do
+ context 'revoked' do
+ before do
+ deploy_token.update_column(:revoked, true)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'expired' do
+ before do
+ deploy_token.update!(expires_at: Date.today - 1.month)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'project type' do
+ before do
+ deploy_token.update_column(:deploy_token_type, 2)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
describe 'deploy_token_type validations' do
context 'when a deploy token is associated to a group' do
it 'does not allow setting a project to it' do
@@ -70,6 +96,50 @@ RSpec.describe DeployToken do
end
end
+ describe '#valid_for_dependency_proxy?' do
+ let_it_be_with_reload(:deploy_token) { create(:deploy_token, :group, :dependency_proxy_scopes) }
+
+ subject { deploy_token.valid_for_dependency_proxy? }
+
+ it { is_expected.to eq(true) }
+
+ it_behaves_like 'invalid group deploy token'
+
+ context 'insufficient scopes' do
+ before do
+ deploy_token.update_column(:write_registry, false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#has_access_to_group?' do
+ let_it_be(:group) { create(:group) }
+ let_it_be_with_reload(:deploy_token) { create(:deploy_token, :group) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, group: group, deploy_token: deploy_token) }
+
+ let(:test_group) { group }
+
+ subject { deploy_token.has_access_to_group?(test_group) }
+
+ it { is_expected.to eq(true) }
+
+ it_behaves_like 'invalid group deploy token'
+
+ context 'for a sub group' do
+ let(:test_group) { create(:group, parent: group) }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'for a different group' do
+ let(:test_group) { create(:group) }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
describe '#scopes' do
context 'with all the scopes' do
let_it_be(:deploy_token) { create(:deploy_token, :all_scopes) }
diff --git a/spec/models/diff_discussion_spec.rb b/spec/models/diff_discussion_spec.rb
index 998204626d3..7a57f895b8a 100644
--- a/spec/models/diff_discussion_spec.rb
+++ b/spec/models/diff_discussion_spec.rb
@@ -128,11 +128,20 @@ RSpec.describe DiffDiscussion do
end
describe '#cache_key' do
+ let(:notes_sha) { Digest::SHA1.hexdigest("#{diff_note.post_processed_cache_key}") }
+ let(:position_sha) { Digest::SHA1.hexdigest(diff_note.position.to_json) }
+
it 'returns the cache key with the position sha' do
- notes_sha = Digest::SHA1.hexdigest("#{diff_note.id}")
- position_sha = Digest::SHA1.hexdigest(diff_note.position.to_json)
+ expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{subject.id}:#{notes_sha}::#{position_sha}:")
+ end
- expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{diff_note.latest_cached_markdown_version}:#{subject.id}:#{notes_sha}:#{diff_note.updated_at}::#{position_sha}")
+ context 'when first note of discussion has diff_note_position' do
+ let!(:diff_note_position) { create(:diff_note_position, note: diff_note) }
+ let(:positions_sha) { Digest::SHA1.hexdigest(diff_note_position.position.to_json) }
+
+ it 'includes sha of diff_note_positions position' do
+ expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{subject.id}:#{notes_sha}::#{position_sha}:#{positions_sha}")
+ end
end
end
end
diff --git a/spec/models/discussion_spec.rb b/spec/models/discussion_spec.rb
index 2b33de96e04..212619a1c3d 100644
--- a/spec/models/discussion_spec.rb
+++ b/spec/models/discussion_spec.rb
@@ -53,10 +53,10 @@ RSpec.describe Discussion do
end
describe '#cache_key' do
- let(:notes_sha) { Digest::SHA1.hexdigest("#{first_note.id}:#{second_note.id}:#{third_note.id}") }
+ let(:notes_sha) { Digest::SHA1.hexdigest("#{first_note.post_processed_cache_key}:#{second_note.post_processed_cache_key}:#{third_note.post_processed_cache_key}") }
- it 'returns the cache key with ID and latest updated note updated at' do
- expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{third_note.latest_cached_markdown_version}:#{subject.id}:#{notes_sha}:#{third_note.updated_at}:")
+ it 'returns the cache key' do
+ expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{subject.id}:#{notes_sha}:")
end
context 'when discussion is resolved' do
@@ -65,7 +65,7 @@ RSpec.describe Discussion do
end
it 'returns the cache key with resolved at' do
- expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{third_note.latest_cached_markdown_version}:#{subject.id}:#{notes_sha}:#{third_note.updated_at}:#{subject.resolved_at}")
+ expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{subject.id}:#{notes_sha}:#{subject.resolved_at}")
end
end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 18a172b72d7..53561586d61 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
subject(:environment) { create(:environment, project: project) }
it { is_expected.to be_kind_of(ReactiveCaching) }
+ it { is_expected.to nullify_if_blank(:external_url) }
it { is_expected.to belong_to(:project).required }
it { is_expected.to have_many(:deployments) }
@@ -214,6 +215,24 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
end
+ describe '.auto_deletable' do
+ subject { described_class.auto_deletable(limit) }
+
+ let(:limit) { 100 }
+
+ context 'when environment is auto-deletable' do
+ let!(:environment) { create(:environment, :auto_deletable) }
+
+ it { is_expected.to eq([environment]) }
+ end
+
+ context 'when environment is not auto-deletable' do
+ let!(:environment) { create(:environment) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '.stop_actions' do
subject { environments.stop_actions }
@@ -412,15 +431,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
end
- describe '#nullify_external_url' do
- it 'replaces a blank url with nil' do
- env = build(:environment, external_url: "")
-
- expect(env.save).to be true
- expect(env.external_url).to be_nil
- end
- end
-
describe '#includes_commit?' do
let(:project) { create(:project, :repository) }
diff --git a/spec/models/error_tracking/client_key_spec.rb b/spec/models/error_tracking/client_key_spec.rb
new file mode 100644
index 00000000000..54176a32f63
--- /dev/null
+++ b/spec/models/error_tracking/client_key_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ErrorTracking::ClientKey, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:public_key) }
+ it { is_expected.to validate_length_of(:public_key).is_at_most(255) }
+ end
+
+ describe '#generate_key' do
+ it { expect(subject.public_key).to be_present }
+ it { expect(subject.public_key).to start_with('glet_') }
+ end
+end
diff --git a/spec/models/error_tracking/error_event_spec.rb b/spec/models/error_tracking/error_event_spec.rb
index 331661f88cc..8e20eb25353 100644
--- a/spec/models/error_tracking/error_event_spec.rb
+++ b/spec/models/error_tracking/error_event_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe ErrorTracking::ErrorEvent, type: :model do
+ let_it_be(:event) { create(:error_tracking_error_event) }
+
describe 'relationships' do
it { is_expected.to belong_to(:error) }
end
@@ -11,4 +13,33 @@ RSpec.describe ErrorTracking::ErrorEvent, type: :model do
it { is_expected.to validate_presence_of(:description) }
it { is_expected.to validate_presence_of(:occurred_at) }
end
+
+ describe '#stacktrace' do
+ it 'generates a correct stacktrace in expected format' do
+ expected_context = [
+ [132, " end\n"],
+ [133, "\n"],
+ [134, " begin\n"],
+ [135, " block.call(work, *extra)\n"],
+ [136, " rescue Exception => e\n"],
+ [137, " STDERR.puts \"Error reached top of thread-pool: #\{e.message\} (#\{e.class\})\"\n"],
+ [138, " end\n"]
+ ]
+
+ expected_entry = {
+ 'lineNo' => 135,
+ 'context' => expected_context,
+ 'filename' => 'puma/thread_pool.rb',
+ 'function' => 'block in spawn_thread',
+ 'colNo' => 0
+ }
+
+ expect(event.stacktrace).to be_kind_of(Array)
+ expect(event.stacktrace.first).to eq(expected_entry)
+ end
+ end
+
+ describe '#to_sentry_error_event' do
+ it { expect(event.to_sentry_error_event).to be_kind_of(Gitlab::ErrorTracking::ErrorEvent) }
+ end
end
diff --git a/spec/models/error_tracking/error_spec.rb b/spec/models/error_tracking/error_spec.rb
index 8591802d15c..57899985daf 100644
--- a/spec/models/error_tracking/error_spec.rb
+++ b/spec/models/error_tracking/error_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe ErrorTracking::Error, type: :model do
+ let_it_be(:error) { create(:error_tracking_error) }
+
describe 'relationships' do
it { is_expected.to belong_to(:project) }
it { is_expected.to have_many(:events) }
@@ -13,4 +15,16 @@ RSpec.describe ErrorTracking::Error, type: :model do
it { is_expected.to validate_presence_of(:description) }
it { is_expected.to validate_presence_of(:actor) }
end
+
+ describe '#title' do
+ it { expect(error.title).to eq('ActionView::MissingTemplate Missing template posts/edit') }
+ end
+
+ describe '#to_sentry_error' do
+ it { expect(error.to_sentry_error).to be_kind_of(Gitlab::ErrorTracking::Error) }
+ end
+
+ describe '#to_sentry_detailed_error' do
+ it { expect(error.to_sentry_detailed_error).to be_kind_of(Gitlab::ErrorTracking::DetailedError) }
+ end
end
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index 3ae0666f7d0..7be61f4950e 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -54,20 +54,22 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
valid_api_url = 'http://example.com/api/0/projects/org-slug/proj-slug/'
valid_token = 'token'
- where(:enabled, :token, :api_url, :valid?) do
- true | nil | nil | false
- true | nil | valid_api_url | false
- true | valid_token | nil | false
- true | valid_token | valid_api_url | true
- false | nil | nil | true
- false | nil | valid_api_url | true
- false | valid_token | nil | true
- false | valid_token | valid_api_url | true
+ where(:enabled, :integrated, :token, :api_url, :valid?) do
+ true | true | nil | nil | true
+ true | false | nil | nil | false
+ true | false | nil | valid_api_url | false
+ true | false | valid_token | nil | false
+ true | false | valid_token | valid_api_url | true
+ false | false | nil | nil | true
+ false | false | nil | valid_api_url | true
+ false | false | valid_token | nil | true
+ false | false | valid_token | valid_api_url | true
end
with_them do
before do
subject.enabled = enabled
+ subject.integrated = integrated
subject.token = token
subject.api_url = api_url
end
@@ -241,7 +243,7 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
end
describe '#issue_details' do
- let(:issue) { build(:detailed_error_tracking_error) }
+ let(:issue) { build(:error_tracking_sentry_detailed_error) }
let(:sentry_client) { double('sentry_client', issue_details: issue) }
let(:commit_id) { issue.first_release_version }
@@ -472,4 +474,25 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
expect(subject.list_sentry_issues(params)).to eq(nil)
end
end
+
+ describe '#sentry_enabled' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:enabled, :integrated, :feature_flag, :sentry_enabled) do
+ true | false | false | true
+ true | true | false | true
+ true | true | true | false
+ false | false | false | false
+ end
+
+ with_them do
+ before do
+ subject.enabled = enabled
+ subject.integrated = integrated
+ stub_feature_flags(integrated_error_tracking: feature_flag)
+ end
+
+ it { expect(subject.sentry_enabled).to eq(sentry_enabled) }
+ end
+ end
end
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index fc229dcaa22..41510b7aa1c 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -982,9 +982,9 @@ RSpec.describe Event do
build(:design_event, trait).action_name
end
- expect(created).to eq('uploaded')
- expect(updated).to eq('revised')
- expect(destroyed).to eq('deleted')
+ expect(created).to eq('added')
+ expect(updated).to eq('updated')
+ expect(destroyed).to eq('removed')
end
it 'handles correct push_action' do
diff --git a/spec/models/experiment_spec.rb b/spec/models/experiment_spec.rb
index 7f0d1e69924..ea5d2b27028 100644
--- a/spec/models/experiment_spec.rb
+++ b/spec/models/experiment_spec.rb
@@ -258,7 +258,7 @@ RSpec.describe Experiment do
let(:variant) { :experimental }
it 'does not initiate a transaction' do
- expect(ActiveRecord::Base.connection).not_to receive(:transaction)
+ expect(Experiment.connection).not_to receive(:transaction)
subject
end
@@ -360,7 +360,7 @@ RSpec.describe Experiment do
let(:context) { {} }
it 'does not initiate a transaction' do
- expect(ActiveRecord::Base.connection).not_to receive(:transaction)
+ expect(Experiment.connection).not_to receive(:transaction)
subject
end
diff --git a/spec/models/gpg_signature_spec.rb b/spec/models/gpg_signature_spec.rb
index 997d9bbec72..7a1799c670e 100644
--- a/spec/models/gpg_signature_spec.rb
+++ b/spec/models/gpg_signature_spec.rb
@@ -54,8 +54,10 @@ RSpec.describe GpgSignature do
end
it 'does not raise an error in case of a race condition' do
- expect(described_class).to receive(:find_or_create_by).and_raise(ActiveRecord::RecordNotUnique)
- allow(described_class).to receive(:find_or_create_by).and_call_original
+ expect(described_class).to receive(:find_by).and_return(nil, double(described_class, persisted?: true))
+
+ expect(described_class).to receive(:create).and_raise(ActiveRecord::RecordNotUnique)
+ allow(described_class).to receive(:create).and_call_original
described_class.safe_create!(attributes)
end
diff --git a/spec/models/group_deploy_token_spec.rb b/spec/models/group_deploy_token_spec.rb
index d38abafa7ed..bc44c473ddb 100644
--- a/spec/models/group_deploy_token_spec.rb
+++ b/spec/models/group_deploy_token_spec.rb
@@ -3,15 +3,40 @@
require 'spec_helper'
RSpec.describe GroupDeployToken, type: :model do
- let(:group) { create(:group) }
- let(:deploy_token) { create(:deploy_token) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:deploy_token) { create(:deploy_token) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, group: group, deploy_token: deploy_token) }
- subject(:group_deploy_token) { create(:group_deploy_token, group: group, deploy_token: deploy_token) }
+ describe 'relationships' do
+ it { is_expected.to belong_to :group }
+ it { is_expected.to belong_to :deploy_token }
+ end
- it { is_expected.to belong_to :group }
- it { is_expected.to belong_to :deploy_token }
+ describe 'validation' do
+ it { is_expected.to validate_presence_of :deploy_token }
+ it { is_expected.to validate_presence_of :group }
+ it { is_expected.to validate_uniqueness_of(:deploy_token_id).scoped_to(:group_id) }
+ end
- it { is_expected.to validate_presence_of :deploy_token }
- it { is_expected.to validate_presence_of :group }
- it { is_expected.to validate_uniqueness_of(:deploy_token_id).scoped_to(:group_id) }
+ describe '#has_access_to_group?' do
+ subject { group_deploy_token.has_access_to_group?(test_group) }
+
+ context 'for itself' do
+ let(:test_group) { group }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'for a subgroup' do
+ let(:test_group) { create(:group, parent: group) }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'for other group' do
+ let(:test_group) { create(:group) }
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 0a08b15a1eb..ddf12c8e4c4 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -640,6 +640,12 @@ RSpec.describe Group do
it { is_expected.to match_array([private_group, internal_group]) }
end
+ describe 'private_only' do
+ subject { described_class.private_only.to_a }
+
+ it { is_expected.to match_array([private_group]) }
+ end
+
describe 'with_onboarding_progress' do
subject { described_class.with_onboarding_progress }
@@ -2598,6 +2604,21 @@ RSpec.describe Group do
it { is_expected.to eq(Set.new([child_1.id])) }
end
+ describe '.timelogs' do
+ let(:project) { create(:project, namespace: group) }
+ let(:issue) { create(:issue, project: project) }
+ let(:other_project) { create(:project, namespace: create(:group)) }
+ let(:other_issue) { create(:issue, project: other_project) }
+
+ let!(:timelog1) { create(:timelog, issue: issue) }
+ let!(:timelog2) { create(:timelog, issue: other_issue) }
+ let!(:timelog3) { create(:timelog, issue: issue) }
+
+ it 'returns timelogs belonging to the group' do
+ expect(group.timelogs).to contain_exactly(timelog1, timelog3)
+ end
+ end
+
describe '#to_ability_name' do
it 'returns group' do
group = build(:group)
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index a99263078b3..17cb5da977a 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -16,6 +16,21 @@ RSpec.describe SystemHook do
end
end
+ describe 'validations' do
+ describe 'url' do
+ let(:url) { 'http://localhost:9000' }
+
+ it { is_expected.not_to allow_value(url).for(:url) }
+
+ it 'is valid if application settings allow local requests from system hooks' do
+ settings = ApplicationSetting.new(allow_local_requests_from_system_hooks: true)
+ allow(ApplicationSetting).to receive(:current).and_return(settings)
+
+ is_expected.to allow_value(url).for(:url)
+ end
+ end
+ end
+
describe "execute", :sidekiq_might_not_need_inline do
let(:system_hook) { create(:system_hook) }
let(:user) { create(:user) }
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 1761b537dc0..c68ad3bf0c4 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -32,6 +32,19 @@ RSpec.describe WebHook do
it { is_expected.not_to allow_value('ftp://example.com').for(:url) }
it { is_expected.not_to allow_value('herp-and-derp').for(:url) }
+ context 'when url is local' do
+ let(:url) { 'http://localhost:9000' }
+
+ it { is_expected.not_to allow_value(url).for(:url) }
+
+ it 'is valid if application settings allow local requests from web hooks' do
+ settings = ApplicationSetting.new(allow_local_requests_from_web_hooks_and_services: true)
+ allow(ApplicationSetting).to receive(:current).and_return(settings)
+
+ is_expected.to allow_value(url).for(:url)
+ end
+ end
+
it 'strips :url before saving it' do
hook.url = ' https://example.com '
hook.save!
@@ -267,6 +280,15 @@ RSpec.describe WebHook do
end
end
+ shared_examples 'is tolerant of invalid records' do
+ specify do
+ hook.url = nil
+
+ expect(hook).to be_invalid
+ run_expectation
+ end
+ end
+
describe '#enable!' do
it 'makes a hook executable if it was marked as failed' do
hook.recent_failures = 1000
@@ -281,15 +303,17 @@ RSpec.describe WebHook do
end
it 'does not update hooks unless necessary' do
- expect(hook).not_to receive(:update!)
+ sql_count = ActiveRecord::QueryRecorder.new { hook.enable! }.count
- hook.enable!
+ expect(sql_count).to eq(0)
end
- it 'is idempotent on executable hooks' do
- expect(hook).not_to receive(:update!)
+ include_examples 'is tolerant of invalid records' do
+ def run_expectation
+ hook.recent_failures = 1000
- expect { hook.enable! }.not_to change(hook, :executable?)
+ expect { hook.enable! }.to change(hook, :executable?).from(false).to(true)
+ end
end
end
@@ -307,6 +331,12 @@ RSpec.describe WebHook do
expect { hook.backoff! }.not_to change(hook, :backoff_count)
end
+
+ include_examples 'is tolerant of invalid records' do
+ def run_expectation
+ expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
+ end
+ end
end
describe 'failed!' do
@@ -314,11 +344,18 @@ RSpec.describe WebHook do
expect { hook.failed! }.to change(hook, :recent_failures).by(1)
end
- it 'does not allow the failure count to exceed the maximum value' do
+ it 'does not update the hook if the the failure count exceeds the maximum value' do
hook.recent_failures = described_class::MAX_FAILURES
- expect(hook).not_to receive(:update!)
- expect { hook.failed! }.not_to change(hook, :recent_failures)
+ sql_count = ActiveRecord::QueryRecorder.new { hook.failed! }.count
+
+ expect(sql_count).to eq(0)
+ end
+
+ include_examples 'is tolerant of invalid records' do
+ def run_expectation
+ expect { hook.failed! }.to change(hook, :recent_failures).by(1)
+ end
end
end
@@ -326,5 +363,11 @@ RSpec.describe WebHook do
it 'disables a hook' do
expect { hook.disable! }.to change(hook, :executable?).from(true).to(false)
end
+
+ include_examples 'is tolerant of invalid records' do
+ def run_expectation
+ expect { hook.disable! }.to change(hook, :executable?).from(true).to(false)
+ end
+ end
end
end
diff --git a/spec/models/incident_management/issuable_escalation_status_spec.rb b/spec/models/incident_management/issuable_escalation_status_spec.rb
new file mode 100644
index 00000000000..f3e7b90cf3c
--- /dev/null
+++ b/spec/models/incident_management/issuable_escalation_status_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::IssuableEscalationStatus do
+ let_it_be(:issue) { create(:issue) }
+
+ subject(:escalation_status) { build(:incident_management_issuable_escalation_status, issue: issue) }
+
+ it { is_expected.to be_valid }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:issue) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:issue) }
+ it { is_expected.to validate_uniqueness_of(:issue) }
+ end
+
+ it_behaves_like 'a model including Escalatable'
+end
diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb
index d3566ed04c2..9544f0fe6ec 100644
--- a/spec/models/instance_configuration_spec.rb
+++ b/spec/models/instance_configuration_spec.rb
@@ -96,6 +96,95 @@ RSpec.describe InstanceConfiguration do
expect(gitlab_ci[:artifacts_max_size][:value]).to eq(200.megabytes)
end
end
+
+ describe '#package_file_size_limits' do
+ let_it_be(:plan1) { create(:plan, name: 'plan1', title: 'Plan 1') }
+ let_it_be(:plan2) { create(:plan, name: 'plan2', title: 'Plan 2') }
+
+ before do
+ create(:plan_limits,
+ plan: plan1,
+ conan_max_file_size: 1001,
+ maven_max_file_size: 1002,
+ npm_max_file_size: 1003,
+ nuget_max_file_size: 1004,
+ pypi_max_file_size: 1005,
+ terraform_module_max_file_size: 1006,
+ generic_packages_max_file_size: 1007
+ )
+ create(:plan_limits,
+ plan: plan2,
+ conan_max_file_size: 1101,
+ maven_max_file_size: 1102,
+ npm_max_file_size: 1103,
+ nuget_max_file_size: 1104,
+ pypi_max_file_size: 1105,
+ terraform_module_max_file_size: 1106,
+ generic_packages_max_file_size: 1107
+ )
+ end
+
+ it 'returns package file size limits' do
+ file_size_limits = subject.settings[:package_file_size_limits]
+
+ expect(file_size_limits[:Plan1]).to eq({ conan: 1001, maven: 1002, npm: 1003, nuget: 1004, pypi: 1005, terraform_module: 1006, generic: 1007 })
+ expect(file_size_limits[:Plan2]).to eq({ conan: 1101, maven: 1102, npm: 1103, nuget: 1104, pypi: 1105, terraform_module: 1106, generic: 1107 })
+ end
+ end
+
+ describe '#rate_limits' do
+ before do
+ Gitlab::CurrentSettings.current_application_settings.update!(
+ throttle_unauthenticated_enabled: false,
+ throttle_unauthenticated_requests_per_period: 1001,
+ throttle_unauthenticated_period_in_seconds: 1002,
+ throttle_authenticated_api_enabled: true,
+ throttle_authenticated_api_requests_per_period: 1003,
+ throttle_authenticated_api_period_in_seconds: 1004,
+ throttle_authenticated_web_enabled: true,
+ throttle_authenticated_web_requests_per_period: 1005,
+ throttle_authenticated_web_period_in_seconds: 1006,
+ throttle_protected_paths_enabled: true,
+ throttle_protected_paths_requests_per_period: 1007,
+ throttle_protected_paths_period_in_seconds: 1008,
+ throttle_unauthenticated_packages_api_enabled: false,
+ throttle_unauthenticated_packages_api_requests_per_period: 1009,
+ throttle_unauthenticated_packages_api_period_in_seconds: 1010,
+ throttle_authenticated_packages_api_enabled: true,
+ throttle_authenticated_packages_api_requests_per_period: 1011,
+ throttle_authenticated_packages_api_period_in_seconds: 1012,
+ issues_create_limit: 1013,
+ notes_create_limit: 1014,
+ project_export_limit: 1015,
+ project_download_export_limit: 1016,
+ project_import_limit: 1017,
+ group_export_limit: 1018,
+ group_download_export_limit: 1019,
+ group_import_limit: 1020,
+ raw_blob_request_limit: 1021
+ )
+ end
+
+ it 'returns rate limits from application settings' do
+ rate_limits = subject.settings[:rate_limits]
+
+ expect(rate_limits[:unauthenticated]).to eq({ enabled: false, requests_per_period: 1001, period_in_seconds: 1002 })
+ expect(rate_limits[:authenticated_api]).to eq({ enabled: true, requests_per_period: 1003, period_in_seconds: 1004 })
+ expect(rate_limits[:authenticated_web]).to eq({ enabled: true, requests_per_period: 1005, period_in_seconds: 1006 })
+ expect(rate_limits[:protected_paths]).to eq({ enabled: true, requests_per_period: 1007, period_in_seconds: 1008 })
+ expect(rate_limits[:unauthenticated_packages_api]).to eq({ enabled: false, requests_per_period: 1009, period_in_seconds: 1010 })
+ expect(rate_limits[:authenticated_packages_api]).to eq({ enabled: true, requests_per_period: 1011, period_in_seconds: 1012 })
+ expect(rate_limits[:issue_creation]).to eq({ enabled: true, requests_per_period: 1013, period_in_seconds: 60 })
+ expect(rate_limits[:note_creation]).to eq({ enabled: true, requests_per_period: 1014, period_in_seconds: 60 })
+ expect(rate_limits[:project_export]).to eq({ enabled: true, requests_per_period: 1015, period_in_seconds: 60 })
+ expect(rate_limits[:project_export_download]).to eq({ enabled: true, requests_per_period: 1016, period_in_seconds: 60 })
+ expect(rate_limits[:project_import]).to eq({ enabled: true, requests_per_period: 1017, period_in_seconds: 60 })
+ expect(rate_limits[:group_export]).to eq({ enabled: true, requests_per_period: 1018, period_in_seconds: 60 })
+ expect(rate_limits[:group_export_download]).to eq({ enabled: true, requests_per_period: 1019, period_in_seconds: 60 })
+ expect(rate_limits[:group_import]).to eq({ enabled: true, requests_per_period: 1020, period_in_seconds: 60 })
+ expect(rate_limits[:raw_blob]).to eq({ enabled: true, requests_per_period: 1021, period_in_seconds: 60 })
+ end
+ end
end
end
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index ab4027170b2..f5f6a425fdd 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -21,38 +21,31 @@ RSpec.describe Integration do
it { is_expected.to validate_presence_of(:type) }
it { is_expected.to validate_exclusion_of(:type).in_array(described_class::BASE_CLASSES) }
- where(:project_id, :group_id, :template, :instance, :valid) do
- 1 | nil | false | false | true
- nil | 1 | false | false | true
- nil | nil | true | false | true
- nil | nil | false | true | true
- nil | nil | false | false | false
- nil | nil | true | true | false
- 1 | 1 | false | false | false
- 1 | nil | true | false | false
- 1 | nil | false | true | false
- nil | 1 | true | false | false
- nil | 1 | false | true | false
+ where(:project_id, :group_id, :instance, :valid) do
+ 1 | nil | false | true
+ nil | 1 | false | true
+ nil | nil | true | true
+ nil | nil | false | false
+ 1 | 1 | false | false
+ 1 | nil | false | true
+ 1 | nil | true | false
+ nil | 1 | false | true
+ nil | 1 | true | false
end
with_them do
it 'validates the service' do
- expect(build(:service, project_id: project_id, group_id: group_id, template: template, instance: instance).valid?).to eq(valid)
+ expect(build(:service, project_id: project_id, group_id: group_id, instance: instance).valid?).to eq(valid)
end
end
context 'with existing services' do
before_all do
- create(:service, :template)
create(:service, :instance)
create(:service, project: project)
create(:service, group: group, project: nil)
end
- it 'allows only one service template per type' do
- expect(build(:service, :template)).to be_invalid
- end
-
it 'allows only one instance service per type' do
expect(build(:service, :instance)).to be_invalid
end
@@ -68,6 +61,24 @@ RSpec.describe Integration do
end
describe 'Scopes' do
+ describe '.with_default_settings' do
+ it 'returns the correct integrations' do
+ instance_integration = create(:integration, :instance)
+ inheriting_integration = create(:integration, inherit_from_id: instance_integration.id)
+
+ expect(described_class.with_default_settings).to match_array([inheriting_integration])
+ end
+ end
+
+ describe '.with_custom_settings' do
+ it 'returns the correct integrations' do
+ instance_integration = create(:integration, :instance)
+ create(:integration, inherit_from_id: instance_integration.id)
+
+ expect(described_class.with_custom_settings).to match_array([instance_integration])
+ end
+ end
+
describe '.by_type' do
let!(:service1) { create(:jira_integration) }
let!(:service2) { create(:jira_integration) }
@@ -263,192 +274,108 @@ RSpec.describe Integration do
end
end
- describe 'template' do
- shared_examples 'retrieves service templates' do
- it 'returns the available service templates' do
- expect(Integration.find_or_create_templates.pluck(:type)).to match_array(Integration.available_integration_types(include_project_specific: false))
+ describe '.build_from_integration' do
+ context 'when integration is invalid' do
+ let(:invalid_integration) do
+ build(:prometheus_integration, :template, active: true, properties: {})
+ .tap { |integration| integration.save!(validate: false) }
end
- end
- describe '.find_or_create_templates' do
- it 'creates service templates' do
- total = Integration.available_integration_names(include_project_specific: false).size
+ it 'sets integration to inactive' do
+ integration = described_class.build_from_integration(invalid_integration, project_id: project.id)
- expect { Integration.find_or_create_templates }.to change(Integration, :count).from(0).to(total)
+ expect(integration).to be_valid
+ expect(integration.active).to be false
end
+ end
- it_behaves_like 'retrieves service templates'
-
- context 'with all existing templates' do
- before do
- Integration.insert_all(
- Integration.available_integration_types(include_project_specific: false).map { |type| { template: true, type: type } }
- )
- end
-
- it 'does not create service templates' do
- expect { Integration.find_or_create_templates }.not_to change { Integration.count }
- end
+ context 'when integration is an instance-level integration' do
+ let(:instance_integration) { create(:jira_integration, :instance) }
- it_behaves_like 'retrieves service templates'
+ it 'sets inherit_from_id from integration' do
+ integration = described_class.build_from_integration(instance_integration, project_id: project.id)
- context 'with a previous existing service (Previous) and a new service (Asana)' do
- before do
- Integration.insert({ type: 'PreviousService', template: true })
- Integration.delete_by(type: 'AsanaService', template: true)
- end
-
- it_behaves_like 'retrieves service templates'
- end
+ expect(integration.inherit_from_id).to eq(instance_integration.id)
end
+ end
- context 'with a few existing templates' do
- before do
- create(:jira_integration, :template)
- end
-
- it 'creates the rest of the service templates' do
- total = Integration.available_integration_names(include_project_specific: false).size
+ context 'when integration is a group-level integration' do
+ let(:group_integration) { create(:jira_integration, group: group, project: nil) }
- expect { Integration.find_or_create_templates }.to change(Integration, :count).from(1).to(total)
- end
+ it 'sets inherit_from_id from integration' do
+ integration = described_class.build_from_integration(group_integration, project_id: project.id)
- it_behaves_like 'retrieves service templates'
+ expect(integration.inherit_from_id).to eq(group_integration.id)
end
end
- describe '.build_from_integration' do
- context 'when integration is invalid' do
- let(:template_integration) do
- build(:prometheus_integration, :template, active: true, properties: {})
- .tap { |integration| integration.save!(validate: false) }
- end
-
- it 'sets integration to inactive' do
- integration = described_class.build_from_integration(template_integration, project_id: project.id)
-
- expect(integration).to be_valid
- expect(integration.active).to be false
- end
+ describe 'build issue tracker from an integration' do
+ let(:url) { 'http://jira.example.com' }
+ let(:api_url) { 'http://api-jira.example.com' }
+ let(:username) { 'jira-username' }
+ let(:password) { 'jira-password' }
+ let(:data_params) do
+ {
+ url: url, api_url: api_url,
+ username: username, password: password
+ }
end
- context 'when integration is an instance-level integration' do
- let(:instance_integration) { create(:jira_integration, :instance) }
-
- it 'sets inherit_from_id from integration' do
- integration = described_class.build_from_integration(instance_integration, project_id: project.id)
+ shared_examples 'service creation from an integration' do
+ it 'creates a correct service for a project integration' do
+ service = described_class.build_from_integration(integration, project_id: project.id)
- expect(integration.inherit_from_id).to eq(instance_integration.id)
+ expect(service).to be_active
+ expect(service.url).to eq(url)
+ expect(service.api_url).to eq(api_url)
+ expect(service.username).to eq(username)
+ expect(service.password).to eq(password)
+ expect(service.instance).to eq(false)
+ expect(service.project).to eq(project)
+ expect(service.group).to eq(nil)
end
- end
-
- context 'when integration is a group-level integration' do
- let(:group_integration) { create(:jira_integration, group: group, project: nil) }
-
- it 'sets inherit_from_id from integration' do
- integration = described_class.build_from_integration(group_integration, project_id: project.id)
- expect(integration.inherit_from_id).to eq(group_integration.id)
+ it 'creates a correct service for a group integration' do
+ service = described_class.build_from_integration(integration, group_id: group.id)
+
+ expect(service).to be_active
+ expect(service.url).to eq(url)
+ expect(service.api_url).to eq(api_url)
+ expect(service.username).to eq(username)
+ expect(service.password).to eq(password)
+ expect(service.instance).to eq(false)
+ expect(service.project).to eq(nil)
+ expect(service.group).to eq(group)
end
end
- describe 'build issue tracker from an integration' do
- let(:url) { 'http://jira.example.com' }
- let(:api_url) { 'http://api-jira.example.com' }
- let(:username) { 'jira-username' }
- let(:password) { 'jira-password' }
- let(:data_params) do
- {
- url: url, api_url: api_url,
- username: username, password: password
- }
- end
-
- shared_examples 'service creation from an integration' do
- it 'creates a correct service for a project integration' do
- service = described_class.build_from_integration(integration, project_id: project.id)
-
- expect(service).to be_active
- expect(service.url).to eq(url)
- expect(service.api_url).to eq(api_url)
- expect(service.username).to eq(username)
- expect(service.password).to eq(password)
- expect(service.template).to eq(false)
- expect(service.instance).to eq(false)
- expect(service.project).to eq(project)
- expect(service.group).to eq(nil)
- end
-
- it 'creates a correct service for a group integration' do
- service = described_class.build_from_integration(integration, group_id: group.id)
-
- expect(service).to be_active
- expect(service.url).to eq(url)
- expect(service.api_url).to eq(api_url)
- expect(service.username).to eq(username)
- expect(service.password).to eq(password)
- expect(service.template).to eq(false)
- expect(service.instance).to eq(false)
- expect(service.project).to eq(nil)
- expect(service.group).to eq(group)
- end
+ # this will be removed as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
+ context 'when data is stored in properties' do
+ let(:properties) { data_params }
+ let!(:integration) do
+ create(:jira_integration, :without_properties_callback, properties: properties.merge(additional: 'something'))
end
- # this will be removed as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
- context 'when data are stored in properties' do
- let(:properties) { data_params }
- let!(:integration) do
- create(:jira_integration, :without_properties_callback, template: true, properties: properties.merge(additional: 'something'))
- end
+ it_behaves_like 'service creation from an integration'
+ end
- it_behaves_like 'service creation from an integration'
+ context 'when data are stored in separated fields' do
+ let(:integration) do
+ create(:jira_integration, data_params.merge(properties: {}))
end
- context 'when data are stored in separated fields' do
- let(:integration) do
- create(:jira_integration, :template, data_params.merge(properties: {}))
- end
-
- it_behaves_like 'service creation from an integration'
- end
+ it_behaves_like 'service creation from an integration'
+ end
- context 'when data are stored in both properties and separated fields' do
- let(:properties) { data_params }
- let(:integration) do
- create(:jira_integration, :without_properties_callback, active: true, template: true, properties: properties).tap do |integration|
- create(:jira_tracker_data, data_params.merge(integration: integration))
- end
+ context 'when data are stored in both properties and separated fields' do
+ let(:properties) { data_params }
+ let(:integration) do
+ create(:jira_integration, :without_properties_callback, active: true, properties: properties).tap do |integration|
+ create(:jira_tracker_data, data_params.merge(integration: integration))
end
-
- it_behaves_like 'service creation from an integration'
end
- end
- end
- describe "for pushover service" do
- let!(:service_template) do
- Integrations::Pushover.create!(
- template: true,
- properties: {
- device: 'MyDevice',
- sound: 'mic',
- priority: 4,
- api_key: '123456789'
- })
- end
-
- describe 'is prefilled for projects pushover service' do
- it "has all fields prefilled" do
- integration = project.find_or_initialize_integration('pushover')
-
- expect(integration).to have_attributes(
- template: eq(false),
- device: eq('MyDevice'),
- sound: eq('mic'),
- priority: eq(4),
- api_key: eq('123456789')
- )
- end
+ it_behaves_like 'service creation from an integration'
end
end
end
@@ -510,121 +437,109 @@ RSpec.describe Integration do
end
describe '.create_from_active_default_integrations' do
- context 'with an active integration template' do
- let_it_be(:template_integration) { create(:prometheus_integration, :template, api_url: 'https://prometheus.template.com/') }
+ context 'with an active instance-level integration' do
+ let!(:instance_integration) { create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/') }
- it 'creates an integration from the template' do
- described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+ it 'creates an integration from the instance-level integration' do
+ described_class.create_from_active_default_integrations(project, :project_id)
expect(project.reload.integrations.size).to eq(1)
- expect(project.reload.integrations.first.api_url).to eq(template_integration.api_url)
- expect(project.reload.integrations.first.inherit_from_id).to be_nil
+ expect(project.reload.integrations.first.api_url).to eq(instance_integration.api_url)
+ expect(project.reload.integrations.first.inherit_from_id).to eq(instance_integration.id)
end
- context 'with an active instance-level integration' do
- let!(:instance_integration) { create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/') }
-
+ context 'passing a group' do
it 'creates an integration from the instance-level integration' do
- described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+ described_class.create_from_active_default_integrations(group, :group_id)
+
+ expect(group.reload.integrations.size).to eq(1)
+ expect(group.reload.integrations.first.api_url).to eq(instance_integration.api_url)
+ expect(group.reload.integrations.first.inherit_from_id).to eq(instance_integration.id)
+ end
+ end
+
+ context 'with an active group-level integration' do
+ let!(:group_integration) { create(:prometheus_integration, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+
+ it 'creates an integration from the group-level integration' do
+ described_class.create_from_active_default_integrations(project, :project_id)
expect(project.reload.integrations.size).to eq(1)
- expect(project.reload.integrations.first.api_url).to eq(instance_integration.api_url)
- expect(project.reload.integrations.first.inherit_from_id).to eq(instance_integration.id)
+ expect(project.reload.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(project.reload.integrations.first.inherit_from_id).to eq(group_integration.id)
end
context 'passing a group' do
- it 'creates an integration from the instance-level integration' do
- described_class.create_from_active_default_integrations(group, :group_id)
+ let!(:subgroup) { create(:group, parent: group) }
- expect(group.reload.integrations.size).to eq(1)
- expect(group.reload.integrations.first.api_url).to eq(instance_integration.api_url)
- expect(group.reload.integrations.first.inherit_from_id).to eq(instance_integration.id)
+ it 'creates an integration from the group-level integration' do
+ described_class.create_from_active_default_integrations(subgroup, :group_id)
+
+ expect(subgroup.reload.integrations.size).to eq(1)
+ expect(subgroup.reload.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(subgroup.reload.integrations.first.inherit_from_id).to eq(group_integration.id)
end
end
- context 'with an active group-level integration' do
- let!(:group_integration) { create(:prometheus_integration, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ context 'with an active subgroup' do
+ let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup) { create(:group, parent: group) }
+ let(:project) { create(:project, group: subgroup) }
- it 'creates an integration from the group-level integration' do
- described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+ it 'creates an integration from the subgroup-level integration' do
+ described_class.create_from_active_default_integrations(project, :project_id)
expect(project.reload.integrations.size).to eq(1)
- expect(project.reload.integrations.first.api_url).to eq(group_integration.api_url)
- expect(project.reload.integrations.first.inherit_from_id).to eq(group_integration.id)
+ expect(project.reload.integrations.first.api_url).to eq(subgroup_integration.api_url)
+ expect(project.reload.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
end
context 'passing a group' do
- let!(:subgroup) { create(:group, parent: group) }
-
- it 'creates an integration from the group-level integration' do
- described_class.create_from_active_default_integrations(subgroup, :group_id)
-
- expect(subgroup.reload.integrations.size).to eq(1)
- expect(subgroup.reload.integrations.first.api_url).to eq(group_integration.api_url)
- expect(subgroup.reload.integrations.first.inherit_from_id).to eq(group_integration.id)
- end
- end
+ let!(:sub_subgroup) { create(:group, parent: subgroup) }
- context 'with an active subgroup' do
- let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
- let!(:subgroup) { create(:group, parent: group) }
- let(:project) { create(:project, group: subgroup) }
+ context 'traversal queries' do
+ shared_examples 'correct ancestor order' do
+ it 'creates an integration from the subgroup-level integration' do
+ described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
- it 'creates an integration from the subgroup-level integration' do
- described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+ sub_subgroup.reload
- expect(project.reload.integrations.size).to eq(1)
- expect(project.reload.integrations.first.api_url).to eq(subgroup_integration.api_url)
- expect(project.reload.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
- end
+ expect(sub_subgroup.integrations.size).to eq(1)
+ expect(sub_subgroup.integrations.first.api_url).to eq(subgroup_integration.api_url)
+ expect(sub_subgroup.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
+ end
- context 'passing a group' do
- let!(:sub_subgroup) { create(:group, parent: subgroup) }
+ context 'having an integration inheriting settings' do
+ let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, inherit_from_id: group_integration.id, api_url: 'https://prometheus.subgroup.com/') }
- context 'traversal queries' do
- shared_examples 'correct ancestor order' do
- it 'creates an integration from the subgroup-level integration' do
+ it 'creates an integration from the group-level integration' do
described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
sub_subgroup.reload
expect(sub_subgroup.integrations.size).to eq(1)
- expect(sub_subgroup.integrations.first.api_url).to eq(subgroup_integration.api_url)
- expect(sub_subgroup.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
- end
-
- context 'having an integration inheriting settings' do
- let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, inherit_from_id: group_integration.id, api_url: 'https://prometheus.subgroup.com/') }
-
- it 'creates an integration from the group-level integration' do
- described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
-
- sub_subgroup.reload
-
- expect(sub_subgroup.integrations.size).to eq(1)
- expect(sub_subgroup.integrations.first.api_url).to eq(group_integration.api_url)
- expect(sub_subgroup.integrations.first.inherit_from_id).to eq(group_integration.id)
- end
+ expect(sub_subgroup.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(sub_subgroup.integrations.first.inherit_from_id).to eq(group_integration.id)
end
end
+ end
- context 'recursive' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- include_examples 'correct ancestor order'
+ context 'recursive' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
end
- context 'linear' do
- before do
- stub_feature_flags(use_traversal_ids: true)
+ include_examples 'correct ancestor order'
+ end
- sub_subgroup.reload # make sure traversal_ids are reloaded
- end
+ context 'linear' do
+ before do
+ stub_feature_flags(use_traversal_ids: true)
- include_examples 'correct ancestor order'
+ sub_subgroup.reload # make sure traversal_ids are reloaded
end
+
+ include_examples 'correct ancestor order'
end
end
end
diff --git a/spec/models/integrations/bamboo_spec.rb b/spec/models/integrations/bamboo_spec.rb
index 73ebf404828..60ff6685c3d 100644
--- a/spec/models/integrations/bamboo_spec.rb
+++ b/spec/models/integrations/bamboo_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
subject(:integration) do
described_class.create!(
+ active: true,
project: project,
properties: {
bamboo_url: bamboo_url,
@@ -74,27 +75,27 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
end
describe 'Callbacks' do
- describe 'before_update :reset_password' do
+ describe 'before_validation :reset_password' do
context 'when a password was previously set' do
it 'resets password if url changed' do
integration.bamboo_url = 'http://gitlab1.com'
- integration.save!
+ expect(integration).not_to be_valid
expect(integration.password).to be_nil
end
it 'does not reset password if username changed' do
integration.username = 'some_name'
- integration.save!
+ expect(integration).to be_valid
expect(integration.password).to eq('password')
end
it "does not reset password if new url is set together with password, even if it's the same password" do
integration.bamboo_url = 'http://gitlab_edited.com'
integration.password = 'password'
- integration.save!
+ expect(integration).to be_valid
expect(integration.password).to eq('password')
expect(integration.bamboo_url).to eq('http://gitlab_edited.com')
end
@@ -107,8 +108,10 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
integration.password = 'password'
integration.save!
- expect(integration.password).to eq('password')
- expect(integration.bamboo_url).to eq('http://gitlab_edited.com')
+ expect(integration.reload).to have_attributes(
+ bamboo_url: 'http://gitlab_edited.com',
+ password: 'password'
+ )
end
end
end
diff --git a/spec/models/integrations/datadog_spec.rb b/spec/models/integrations/datadog_spec.rb
index e2749ab1bc1..677bd4c5e48 100644
--- a/spec/models/integrations/datadog_spec.rb
+++ b/spec/models/integrations/datadog_spec.rb
@@ -6,7 +6,8 @@ require 'spec_helper'
RSpec.describe Integrations::Datadog do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
- let_it_be(:build) { create(:ci_build, project: project) }
+ let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:retried_build) { create(:ci_build, :retried, pipeline: pipeline) }
let(:active) { true }
let(:dd_site) { 'datadoghq.com' }
@@ -139,26 +140,38 @@ RSpec.describe Integrations::Datadog do
end
describe '#test' do
- context 'when request is succesful' do
- subject { saved_instance.test(pipeline_data) }
+ subject(:result) { saved_instance.test(pipeline_data) }
- before do
- stub_request(:post, expected_hook_url).to_return(body: 'OK')
- end
+ let(:body) { 'OK' }
+ let(:status) { 200 }
+
+ before do
+ stub_request(:post, expected_hook_url).to_return(body: body, status: status)
+ end
+
+ context 'when request is successful with a HTTP 200 status' do
it { is_expected.to eq({ success: true, result: 'OK' }) }
end
- context 'when request fails' do
- subject { saved_instance.test(pipeline_data) }
+ context 'when request is successful with a HTTP 202 status' do
+ let(:status) { 202 }
+
+ it { is_expected.to eq({ success: true, result: 'OK' }) }
+ end
+
+ context 'when request fails with a HTTP 500 status' do
+ let(:status) { 500 }
+ let(:body) { 'CRASH!!!' }
- before do
- stub_request(:post, expected_hook_url).to_return(body: 'CRASH!!!', status: 500)
- end
it { is_expected.to eq({ success: false, result: 'CRASH!!!' }) }
end
end
describe '#execute' do
+ around do |example|
+ freeze_time { example.run }
+ end
+
before do
stub_request(:post, expected_hook_url)
saved_instance.execute(data)
@@ -166,20 +179,18 @@ RSpec.describe Integrations::Datadog do
context 'with pipeline data' do
let(:data) { pipeline_data }
- let(:expected_headers) do
- { WebHookService::GITLAB_EVENT_HEADER => 'Pipeline Hook' }
- end
+ let(:expected_headers) { { WebHookService::GITLAB_EVENT_HEADER => 'Pipeline Hook' } }
+ let(:expected_body) { data.with_retried_builds.to_json }
- it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers)).to have_been_made }
+ it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers, body: expected_body)).to have_been_made }
end
context 'with job data' do
let(:data) { build_data }
- let(:expected_headers) do
- { WebHookService::GITLAB_EVENT_HEADER => 'Job Hook' }
- end
+ let(:expected_headers) { { WebHookService::GITLAB_EVENT_HEADER => 'Job Hook' } }
+ let(:expected_body) { data.to_json }
- it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers)).to have_been_made }
+ it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers, body: expected_body)).to have_been_made }
end
end
end
diff --git a/spec/models/integrations/jenkins_spec.rb b/spec/models/integrations/jenkins_spec.rb
index 9eb2a7fc098..9286d026290 100644
--- a/spec/models/integrations/jenkins_spec.rb
+++ b/spec/models/integrations/jenkins_spec.rb
@@ -200,21 +200,21 @@ RSpec.describe Integrations::Jenkins do
it 'resets password if url changed' do
jenkins_integration.jenkins_url = 'http://jenkins-edited.example.com/'
- jenkins_integration.save!
+ jenkins_integration.valid?
expect(jenkins_integration.password).to be_nil
end
it 'resets password if username is blank' do
jenkins_integration.username = ''
- jenkins_integration.save!
+ jenkins_integration.valid?
expect(jenkins_integration.password).to be_nil
end
it 'does not reset password if username changed' do
jenkins_integration.username = 'some_name'
- jenkins_integration.save!
+ jenkins_integration.valid?
expect(jenkins_integration.password).to eq('password')
end
@@ -222,7 +222,7 @@ RSpec.describe Integrations::Jenkins do
it 'does not reset password if new url is set together with password, even if it\'s the same password' do
jenkins_integration.jenkins_url = 'http://jenkins_edited.example.com/'
jenkins_integration.password = 'password'
- jenkins_integration.save!
+ jenkins_integration.valid?
expect(jenkins_integration.password).to eq('password')
expect(jenkins_integration.jenkins_url).to eq('http://jenkins_edited.example.com/')
@@ -231,7 +231,7 @@ RSpec.describe Integrations::Jenkins do
it 'resets password if url changed, even if setter called multiple times' do
jenkins_integration.jenkins_url = 'http://jenkins1.example.com/'
jenkins_integration.jenkins_url = 'http://jenkins1.example.com/'
- jenkins_integration.save!
+ jenkins_integration.valid?
expect(jenkins_integration.password).to be_nil
end
@@ -253,8 +253,10 @@ RSpec.describe Integrations::Jenkins do
jenkins_integration.password = 'password'
jenkins_integration.save!
- expect(jenkins_integration.password).to eq('password')
- expect(jenkins_integration.jenkins_url).to eq('http://jenkins_edited.example.com/')
+ expect(jenkins_integration.reload).to have_attributes(
+ jenkins_url: 'http://jenkins_edited.example.com/',
+ password: 'password'
+ )
end
end
end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index 6ca72d68bbb..0321b151633 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -280,7 +280,7 @@ RSpec.describe Integrations::Jira do
expect(integration.jira_tracker_data.deployment_server?).to be_truthy
- integration.update!(api_url: 'http://another.url')
+ integration.update!(api_url: 'http://another.url', password: password)
integration.jira_tracker_data.reload
expect(integration.jira_tracker_data.deployment_cloud?).to be_truthy
@@ -301,13 +301,13 @@ RSpec.describe Integrations::Jira do
end
it 'calls serverInfo for url' do
- integration.update!(url: 'http://first.url')
+ integration.update!(url: 'http://first.url', password: password)
expect(WebMock).to have_requested(:get, /serverInfo/)
end
it 'calls serverInfo for api_url' do
- integration.update!(api_url: 'http://another.url')
+ integration.update!(api_url: 'http://another.url', password: password)
expect(WebMock).to have_requested(:get, /serverInfo/)
end
@@ -334,16 +334,6 @@ RSpec.describe Integrations::Jira do
end
end
- context 'when not allowed to test an instance or group' do
- it 'does not update deployment type' do
- allow(integration).to receive(:testable?).and_return(false)
-
- integration.update!(url: 'http://first.url')
-
- expect(WebMock).not_to have_requested(:get, /serverInfo/)
- end
- end
-
context 'stored password invalidation' do
context 'when a password was previously set' do
context 'when only web url present' do
@@ -358,33 +348,33 @@ RSpec.describe Integrations::Jira do
it 'resets password if url changed' do
integration
integration.url = 'http://jira_edited.example.com'
- integration.save!
- expect(integration.reload.url).to eq('http://jira_edited.example.com')
+ expect(integration).not_to be_valid
+ expect(integration.url).to eq('http://jira_edited.example.com')
expect(integration.password).to be_nil
end
it 'does not reset password if url "changed" to the same url as before' do
integration.url = 'http://jira.example.com'
- integration.save!
- expect(integration.reload.url).to eq('http://jira.example.com')
+ expect(integration).to be_valid
+ expect(integration.url).to eq('http://jira.example.com')
expect(integration.password).not_to be_nil
end
it 'resets password if url not changed but api url added' do
integration.api_url = 'http://jira_edited.example.com/rest/api/2'
- integration.save!
- expect(integration.reload.api_url).to eq('http://jira_edited.example.com/rest/api/2')
+ expect(integration).not_to be_valid
+ expect(integration.api_url).to eq('http://jira_edited.example.com/rest/api/2')
expect(integration.password).to be_nil
end
it 'does not reset password if new url is set together with password, even if it\'s the same password' do
integration.url = 'http://jira_edited.example.com'
integration.password = password
- integration.save!
+ expect(integration).to be_valid
expect(integration.password).to eq(password)
expect(integration.url).to eq('http://jira_edited.example.com')
end
@@ -392,32 +382,32 @@ RSpec.describe Integrations::Jira do
it 'resets password if url changed, even if setter called multiple times' do
integration.url = 'http://jira1.example.com/rest/api/2'
integration.url = 'http://jira1.example.com/rest/api/2'
- integration.save!
+ expect(integration).not_to be_valid
expect(integration.password).to be_nil
end
it 'does not reset password if username changed' do
integration.username = 'some_name'
- integration.save!
- expect(integration.reload.password).to eq(password)
+ expect(integration).to be_valid
+ expect(integration.password).to eq(password)
end
it 'does not reset password if password changed' do
integration.url = 'http://jira_edited.example.com'
integration.password = 'new_password'
- integration.save!
- expect(integration.reload.password).to eq('new_password')
+ expect(integration).to be_valid
+ expect(integration.password).to eq('new_password')
end
it 'does not reset password if the password is touched and same as before' do
integration.url = 'http://jira_edited.example.com'
integration.password = password
- integration.save!
- expect(integration.reload.password).to eq(password)
+ expect(integration).to be_valid
+ expect(integration.password).to eq(password)
end
end
@@ -432,22 +422,23 @@ RSpec.describe Integrations::Jira do
it 'resets password if api url changed' do
integration.api_url = 'http://jira_edited.example.com/rest/api/2'
- integration.save!
+ expect(integration).not_to be_valid
expect(integration.password).to be_nil
end
it 'does not reset password if url changed' do
integration.url = 'http://jira_edited.example.com'
- integration.save!
+ expect(integration).to be_valid
expect(integration.password).to eq(password)
end
it 'resets password if api url set to empty' do
- integration.update!(api_url: '')
+ integration.api_url = ''
- expect(integration.reload.password).to be_nil
+ expect(integration).not_to be_valid
+ expect(integration.password).to be_nil
end
end
end
@@ -463,8 +454,11 @@ RSpec.describe Integrations::Jira do
integration.url = 'http://jira_edited.example.com/rest/api/2'
integration.password = 'password'
integration.save!
- expect(integration.reload.password).to eq('password')
- expect(integration.reload.url).to eq('http://jira_edited.example.com/rest/api/2')
+
+ expect(integration.reload).to have_attributes(
+ url: 'http://jira_edited.example.com/rest/api/2',
+ password: 'password'
+ )
end
end
end
@@ -492,7 +486,7 @@ RSpec.describe Integrations::Jira do
context 'when data are stored in both properties and separated fields' do
let(:properties) { data_params }
let(:integration) do
- create(:jira_integration, :without_properties_callback, active: false, properties: properties).tap do |integration|
+ create(:jira_integration, :without_properties_callback, properties: properties).tap do |integration|
create(:jira_tracker_data, data_params.merge(integration: integration))
end
end
diff --git a/spec/models/integrations/teamcity_spec.rb b/spec/models/integrations/teamcity_spec.rb
index d425357aef0..0713141ea08 100644
--- a/spec/models/integrations/teamcity_spec.rb
+++ b/spec/models/integrations/teamcity_spec.rb
@@ -76,18 +76,18 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
describe 'Callbacks' do
let(:teamcity_integration) { integration }
- describe 'before_update :reset_password' do
+ describe 'before_validation :reset_password' do
context 'when a password was previously set' do
it 'resets password if url changed' do
teamcity_integration.teamcity_url = 'http://gitlab1.com'
- teamcity_integration.save!
+ teamcity_integration.valid?
expect(teamcity_integration.password).to be_nil
end
it 'does not reset password if username changed' do
teamcity_integration.username = 'some_name'
- teamcity_integration.save!
+ teamcity_integration.valid?
expect(teamcity_integration.password).to eq('password')
end
@@ -95,7 +95,7 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
it "does not reset password if new url is set together with password, even if it's the same password" do
teamcity_integration.teamcity_url = 'http://gitlab_edited.com'
teamcity_integration.password = 'password'
- teamcity_integration.save!
+ teamcity_integration.valid?
expect(teamcity_integration.password).to eq('password')
expect(teamcity_integration.teamcity_url).to eq('http://gitlab_edited.com')
@@ -109,8 +109,10 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
teamcity_integration.password = 'password'
teamcity_integration.save!
- expect(teamcity_integration.password).to eq('password')
- expect(teamcity_integration.teamcity_url).to eq('http://gitlab_edited.com')
+ expect(teamcity_integration.reload).to have_attributes(
+ teamcity_url: 'http://gitlab_edited.com',
+ password: 'password'
+ )
end
end
end
diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb
index 696b5b48cbf..6aba91d9471 100644
--- a/spec/models/internal_id_spec.rb
+++ b/spec/models/internal_id_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe InternalId do
context 'when executed outside of transaction' do
it 'increments counter with in_transaction: "false"' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false } # rubocop: disable Database/MultipleDatabases
expect(InternalId.internal_id_transactions_total).to receive(:increment)
.with(operation: :generate, usage: 'issues', in_transaction: 'false').and_call_original
@@ -147,7 +147,7 @@ RSpec.describe InternalId do
let(:value) { 2 }
it 'increments counter with in_transaction: "false"' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false } # rubocop: disable Database/MultipleDatabases
expect(InternalId.internal_id_transactions_total).to receive(:increment)
.with(operation: :reset, usage: 'issues', in_transaction: 'false').and_call_original
@@ -218,7 +218,7 @@ RSpec.describe InternalId do
context 'when executed outside of transaction' do
it 'increments counter with in_transaction: "false"' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false } # rubocop: disable Database/MultipleDatabases
expect(InternalId.internal_id_transactions_total).to receive(:increment)
.with(operation: :track_greatest, usage: 'issues', in_transaction: 'false').and_call_original
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 441446bae60..116bda7a18b 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Issue do
it { is_expected.to belong_to(:iteration) }
it { is_expected.to belong_to(:project) }
it { is_expected.to have_one(:namespace).through(:project) }
+ it { is_expected.to belong_to(:work_item_type).class_name('WorkItems::Type') }
it { is_expected.to belong_to(:moved_to).class_name('Issue') }
it { is_expected.to have_one(:moved_from).class_name('Issue') }
it { is_expected.to belong_to(:duplicated_to).class_name('Issue') }
@@ -31,6 +32,8 @@ RSpec.describe Issue do
it { is_expected.to have_and_belong_to_many(:self_managed_prometheus_alert_events) }
it { is_expected.to have_many(:prometheus_alerts) }
it { is_expected.to have_many(:issue_email_participants) }
+ it { is_expected.to have_many(:timelogs).autosave(true) }
+ it { is_expected.to have_one(:incident_management_issuable_escalation_status) }
describe 'versions.most_recent' do
it 'returns the most recent version' do
@@ -614,33 +617,40 @@ RSpec.describe Issue do
let(:subject) { create :issue }
end
- describe "#to_branch_name" do
- let_it_be(:issue) { create(:issue, project: reusable_project, title: 'testing-issue') }
+ describe '.to_branch_name' do
+ it 'parameterizes arguments and joins with dashes' do
+ expect(described_class.to_branch_name(123, 'foo bar', '!@#$%', 'f!o@o#b$a%r^')).to eq('123-foo-bar-f-o-o-b-a-r')
+ end
- it 'starts with the issue iid' do
- expect(issue.to_branch_name).to match(/\A#{issue.iid}-[A-Za-z\-]+\z/)
+ it 'preserves the case in the first argument' do
+ expect(described_class.to_branch_name('ACME-!@#$-123', 'FoO BaR')).to eq('ACME-123-foo-bar')
end
- it "contains the issue title if not confidential" do
- expect(issue.to_branch_name).to match(/testing-issue\z/)
+ it 'truncates branch name to at most 100 characters' do
+ expect(described_class.to_branch_name('a' * 101)).to eq('a' * 100)
end
- it "does not contain the issue title if confidential" do
- issue = create(:issue, project: reusable_project, title: 'testing-issue', confidential: true)
- expect(issue.to_branch_name).to match(/confidential-issue\z/)
+ it 'truncates dangling parts of the branch name' do
+ branch_name = described_class.to_branch_name(
+ 999,
+ 'Lorem ipsum dolor sit amet consectetur adipiscing elit Mauris sit amet ipsum id lacus custom fringilla convallis'
+ )
+
+ # 100 characters would've got us "999-lorem...lacus-custom-fri".
+ expect(branch_name).to eq('999-lorem-ipsum-dolor-sit-amet-consectetur-adipiscing-elit-mauris-sit-amet-ipsum-id-lacus-custom')
end
+ end
- context 'issue title longer than 100 characters' do
- let_it_be(:issue) { create(:issue, project: reusable_project, iid: 999, title: 'Lorem ipsum dolor sit amet consectetur adipiscing elit Mauris sit amet ipsum id lacus custom fringilla convallis') }
+ describe '#to_branch_name' do
+ let_it_be(:issue) { create(:issue, project: reusable_project, iid: 123, title: 'Testing Issue') }
- it "truncates branch name to at most 100 characters" do
- expect(issue.to_branch_name.length).to be <= 100
- end
+ it 'returns a branch name with the issue title if not confidential' do
+ expect(issue.to_branch_name).to eq('123-testing-issue')
+ end
- it "truncates dangling parts of the branch name" do
- # 100 characters would've got us "999-lorem...lacus-custom-fri".
- expect(issue.to_branch_name).to eq("999-lorem-ipsum-dolor-sit-amet-consectetur-adipiscing-elit-mauris-sit-amet-ipsum-id-lacus-custom")
- end
+ it 'returns a generic branch name if confidential' do
+ issue.confidential = true
+ expect(issue.to_branch_name).to eq('123-confidential-issue')
end
end
@@ -787,17 +797,47 @@ RSpec.describe Issue do
end
end
+ shared_examples 'hidden issue readable by user' do
+ before do
+ issue.author.ban!
+ end
+
+ specify do
+ is_expected.to eq(true)
+ end
+
+ after do
+ issue.author.activate!
+ end
+ end
+
+ shared_examples 'hidden issue not readable by user' do
+ before do
+ issue.author.ban!
+ end
+
+ specify do
+ is_expected.to eq(false)
+ end
+
+ after do
+ issue.author.activate!
+ end
+ end
+
context 'with an admin user' do
let(:user) { build(:admin) }
context 'when admin mode is enabled', :enable_admin_mode do
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
+ it_behaves_like 'hidden issue readable by user'
end
context 'when admin mode is disabled' do
it_behaves_like 'issue not readable by user'
it_behaves_like 'confidential issue not readable by user'
+ it_behaves_like 'hidden issue not readable by user'
end
end
@@ -808,6 +848,7 @@ RSpec.describe Issue do
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
+ it_behaves_like 'hidden issue not readable by user'
end
context 'with a reporter user' do
@@ -817,6 +858,7 @@ RSpec.describe Issue do
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
+ it_behaves_like 'hidden issue not readable by user'
end
context 'with a guest user' do
@@ -826,6 +868,7 @@ RSpec.describe Issue do
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue not readable by user'
+ it_behaves_like 'hidden issue not readable by user'
context 'when user is an assignee' do
before do
@@ -834,6 +877,7 @@ RSpec.describe Issue do
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
+ it_behaves_like 'hidden issue not readable by user'
end
context 'when user is the author' do
@@ -843,6 +887,7 @@ RSpec.describe Issue do
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
+ it_behaves_like 'hidden issue not readable by user'
end
end
@@ -852,6 +897,7 @@ RSpec.describe Issue do
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue not readable by user'
+ it_behaves_like 'hidden issue not readable by user'
end
context 'using an internal project' do
@@ -864,6 +910,7 @@ RSpec.describe Issue do
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue not readable by user'
+ it_behaves_like 'hidden issue not readable by user'
end
context 'using an external user' do
@@ -873,6 +920,7 @@ RSpec.describe Issue do
it_behaves_like 'issue not readable by user'
it_behaves_like 'confidential issue not readable by user'
+ it_behaves_like 'hidden issue not readable by user'
end
end
@@ -883,6 +931,7 @@ RSpec.describe Issue do
it_behaves_like 'issue not readable by user'
it_behaves_like 'confidential issue not readable by user'
+ it_behaves_like 'hidden issue not readable by user'
end
end
@@ -1112,14 +1161,14 @@ RSpec.describe Issue do
with_them do
it 'checks for spam when necessary' do
- author = support_bot? ? support_bot : user
+ active_user = support_bot? ? support_bot : user
project = reusable_project
project.update!(visibility_level: visibility_level)
- issue = create(:issue, project: project, confidential: confidential, description: 'original description', author: author)
+ issue = create(:issue, project: project, confidential: confidential, description: 'original description', author: support_bot)
issue.assign_attributes(new_attributes)
- expect(issue.check_for_spam?).to eq(check_for_spam?)
+ expect(issue.check_for_spam?(user: active_user)).to eq(check_for_spam?)
end
end
end
@@ -1151,6 +1200,26 @@ RSpec.describe Issue do
end
end
+ describe '.without_hidden' do
+ let_it_be(:banned_user) { create(:user, :banned) }
+ let_it_be(:public_issue) { create(:issue, project: reusable_project) }
+ let_it_be(:hidden_issue) { create(:issue, project: reusable_project, author: banned_user) }
+
+ it 'only returns without_hidden issues' do
+ expect(described_class.without_hidden).to eq([public_issue])
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it 'returns public and hidden issues' do
+ expect(described_class.without_hidden).to eq([public_issue, hidden_issue])
+ end
+ end
+ end
+
describe '.by_project_id_and_iid' do
let_it_be(:issue_a) { create(:issue, project: reusable_project) }
let_it_be(:issue_b) { create(:issue, iid: issue_a.iid) }
diff --git a/spec/models/jira_connect_installation_spec.rb b/spec/models/jira_connect_installation_spec.rb
index 8ef96114c45..3d1095845aa 100644
--- a/spec/models/jira_connect_installation_spec.rb
+++ b/spec/models/jira_connect_installation_spec.rb
@@ -15,6 +15,9 @@ RSpec.describe JiraConnectInstallation do
it { is_expected.to allow_value('https://test.atlassian.net').for(:base_url) }
it { is_expected.not_to allow_value('not/a/url').for(:base_url) }
+
+ it { is_expected.to allow_value('https://test.atlassian.net').for(:instance_url) }
+ it { is_expected.not_to allow_value('not/a/url').for(:instance_url) }
end
describe '.for_project' do
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 5824c2085ce..067b3c25645 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -64,6 +64,49 @@ RSpec.describe Member do
end
end
+ context 'with admin signup restrictions' do
+ context 'when allowed domains for signup is enabled' do
+ before do
+ stub_application_setting(domain_allowlist: ['example.com'])
+ end
+
+ it 'adds an error message when email is not accepted' do
+ member = build(:group_member, :invited, invite_email: 'info@gitlab.com')
+
+ expect(member).not_to be_valid
+ expect(member.errors.messages[:user].first).to eq(_('domain is not authorized for sign-up.'))
+ end
+ end
+
+ context 'when denylist is enabled' do
+ before do
+ stub_application_setting(domain_denylist_enabled: true)
+ stub_application_setting(domain_denylist: ['example.org'])
+ end
+
+ it 'adds an error message when email is denied' do
+ member = build(:group_member, :invited, invite_email: 'denylist@example.org')
+
+ expect(member).not_to be_valid
+ expect(member.errors.messages[:user].first).to eq(_('is not from an allowed domain.'))
+ end
+ end
+
+ context 'when email restrictions is enabled' do
+ before do
+ stub_application_setting(email_restrictions_enabled: true)
+ stub_application_setting(email_restrictions: '([\+]|\b(\w*gitlab.com\w*)\b)')
+ end
+
+ it 'adds an error message when email is not accepted' do
+ member = build(:group_member, :invited, invite_email: 'info@gitlab.com')
+
+ expect(member).not_to be_valid
+ expect(member.errors.messages[:user].first).to eq(_('is not allowed. Try again with a different email address, or contact your GitLab admin.'))
+ end
+ end
+ end
+
context "when a child member inherits its access level" do
let(:user) { create(:user) }
let(:member) { create(:group_member, :developer, user: user) }
@@ -624,7 +667,23 @@ RSpec.describe Member do
let!(:member) { create(:project_member, invite_email: "user@example.com", user: nil) }
it "sets the invite token" do
- expect { member.generate_invite_token }.to change { member.invite_token}
+ expect { member.generate_invite_token }.to change { member.invite_token }
+ end
+ end
+
+ describe 'generate invite token on create' do
+ let!(:member) { build(:project_member, invite_email: "user@example.com") }
+
+ it "sets the invite token" do
+ expect { member.save! }.to change { member.invite_token }.to(kind_of(String))
+ end
+
+ context 'when invite was already accepted' do
+ it "does not set invite token" do
+ member.invite_accepted_at = 1.day.ago
+
+ expect { member.save! }.not_to change { member.invite_token }.from(nil)
+ end
end
end
@@ -749,4 +808,44 @@ RSpec.describe Member do
end
end
end
+
+ describe 'log_invitation_token_cleanup' do
+ let_it_be(:project) { create :project }
+
+ context 'when on gitlab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return true
+ end
+
+ it "doesn't log info for members without invitation or accepted invitation" do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ create :project_member
+ create :project_member, :invited, invite_accepted_at: nil
+ create :project_member, invite_token: nil, invite_accepted_at: Time.zone.now
+ end
+
+ it 'logs error for accepted members with token and creates membership' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(kind_of(StandardError), kind_of(Hash))
+
+ expect do
+ create :project_member, :invited, source: project, invite_accepted_at: Time.zone.now
+ end.to change { Member.count }.by(1)
+ end
+ end
+
+ context 'when not on gitlab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return false
+ end
+
+ it 'does not log error for accepted members with token and creates membership' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ expect do
+ create :project_member, :invited, source: project, invite_accepted_at: Time.zone.now
+ end.to change { Member.count }.by(1)
+ end
+ end
+ end
end
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 472f4280d26..92f9099d04d 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -136,4 +136,16 @@ RSpec.describe GroupMember do
group_member.update!(expires_at: 5.days.from_now)
end
end
+
+ describe 'refresh_member_authorized_projects' do
+ context 'when importing' do
+ it 'does not refresh' do
+ expect(UserProjectAccessChangedService).not_to receive(:new)
+
+ member = build(:group_member)
+ member.importing = true
+ member.save!
+ end
+ end
+ end
end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 4c59bda856f..1704d5adb96 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -139,4 +139,171 @@ RSpec.describe ProjectMember do
end
end
end
+
+ context 'refreshing project_authorizations' do
+ let_it_be_with_refind(:project) { create(:project) }
+ let_it_be_with_refind(:user) { create(:user) }
+ let_it_be(:project_member) { create(:project_member, :guest, project: project, user: user) }
+
+ context 'when the source project of the project member is destroyed' do
+ it 'refreshes the authorization of user to the project in the group' do
+ expect { project.destroy! }.to change { user.can?(:guest_access, project) }.from(true).to(false)
+ end
+
+ it 'refreshes the authorization without calling AuthorizedProjectUpdate::ProjectRecalculatePerUserService' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserService).not_to receive(:new)
+
+ project.destroy!
+ end
+ end
+
+ context 'when the user of the project member is destroyed' do
+ it 'refreshes the authorization of user to the project in the group' do
+ expect(project.authorized_users).to include(user)
+
+ user.destroy!
+
+ expect(project.authorized_users).not_to include(user)
+ end
+
+ it 'refreshes the authorization without calling UserProjectAccessChangedService' do
+ expect(UserProjectAccessChangedService).not_to receive(:new)
+
+ user.destroy!
+ end
+ end
+
+ context 'when importing' do
+ it 'does not refresh' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserService).not_to receive(:new)
+
+ member = build(:project_member)
+ member.importing = true
+ member.save!
+ end
+ end
+ end
+
+ context 'authorization refresh on addition/updation/deletion' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ shared_examples_for 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService to recalculate authorizations' do
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService' do
+ expect_next_instance_of(AuthorizedProjectUpdate::ProjectRecalculatePerUserService, project, user) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ action
+ end
+ end
+
+ shared_examples_for 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker' do
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
+
+ action
+ end
+ end
+
+ context 'on create' do
+ let(:action) { project.add_user(user, Gitlab::Access::GUEST) }
+
+ it 'changes access level' do
+ expect { action }.to change { user.can?(:guest_access, project) }.from(false).to(true)
+ end
+
+ it_behaves_like 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService to recalculate authorizations'
+ it_behaves_like 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations'
+ end
+
+ context 'on update' do
+ let(:action) { project.members.find_by(user: user).update!(access_level: Gitlab::Access::DEVELOPER) }
+
+ before do
+ project.add_user(user, Gitlab::Access::GUEST)
+ end
+
+ it 'changes access level' do
+ expect { action }.to change { user.can?(:developer_access, project) }.from(false).to(true)
+ end
+
+ it_behaves_like 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService to recalculate authorizations'
+ it_behaves_like 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations'
+ end
+
+ context 'on destroy' do
+ let(:action) { project.members.find_by(user: user).destroy! }
+
+ before do
+ project.add_user(user, Gitlab::Access::GUEST)
+ end
+
+ it 'changes access level' do
+ expect { action }.to change { user.can?(:guest_access, project) }.from(true).to(false)
+ end
+
+ it_behaves_like 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService to recalculate authorizations'
+ it_behaves_like 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations'
+ end
+
+ context 'when the feature flag `specialized_service_for_project_member_auth_refresh` is disabled' do
+ before do
+ stub_feature_flags(specialized_service_for_project_member_auth_refresh: false)
+ end
+
+ shared_examples_for 'calls UserProjectAccessChangedService to recalculate authorizations' do
+ it 'calls UserProjectAccessChangedService' do
+ expect_next_instance_of(UserProjectAccessChangedService, user.id) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ action
+ end
+ end
+
+ context 'on create' do
+ let(:action) { project.add_user(user, Gitlab::Access::GUEST) }
+
+ it 'changes access level' do
+ expect { action }.to change { user.can?(:guest_access, project) }.from(false).to(true)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+
+ context 'on update' do
+ let(:action) { project.members.find_by(user: user).update!(access_level: Gitlab::Access::DEVELOPER) }
+
+ before do
+ project.add_user(user, Gitlab::Access::GUEST)
+ end
+
+ it 'changes access level' do
+ expect { action }.to change { user.can?(:developer_access, project) }.from(false).to(true)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+
+ context 'on destroy' do
+ let(:action) { project.members.find_by(user: user).destroy! }
+
+ before do
+ project.add_user(user, Gitlab::Access::GUEST)
+ end
+
+ it 'changes access level' do
+ expect { action }.to change { user.can?(:guest_access, project) }.from(true).to(false)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+ end
+ end
end
diff --git a/spec/models/merge_request_diff_commit_spec.rb b/spec/models/merge_request_diff_commit_spec.rb
index 6290468d4a7..adddec7ced8 100644
--- a/spec/models/merge_request_diff_commit_spec.rb
+++ b/spec/models/merge_request_diff_commit_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe MergeRequestDiffCommit do
subject { described_class.create_bulk(merge_request_diff_id, commits) }
it 'inserts the commits into the database en masse' do
- expect(Gitlab::Database).to receive(:bulk_insert)
+ expect(Gitlab::Database.main).to receive(:bulk_insert)
.with(described_class.table_name, rows)
subject
@@ -126,7 +126,7 @@ RSpec.describe MergeRequestDiffCommit do
end
it 'uses a sanitized date' do
- expect(Gitlab::Database).to receive(:bulk_insert)
+ expect(Gitlab::Database.main).to receive(:bulk_insert)
.with(described_class.table_name, rows)
subject
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index e0e25031589..5fff880c44e 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -240,7 +240,7 @@ RSpec.describe MergeRequestDiff do
stub_external_diffs_setting(enabled: true)
expect(diff).not_to receive(:save!)
- expect(Gitlab::Database)
+ expect(Gitlab::Database.main)
.to receive(:bulk_insert)
.with('merge_request_diff_files', anything)
.and_raise(ActiveRecord::Rollback)
@@ -465,11 +465,13 @@ RSpec.describe MergeRequestDiff do
it 'sorts diff files directory first' do
diff_with_commits.update!(sorted: false) # Mark as unsorted so it'll re-order
- expect(diff_with_commits.diffs_in_batch(0, 10, diff_options: diff_options).diff_file_paths).to eq([
+ # There will be 11 returned, as we have to take into account for new and old paths
+ expect(diff_with_commits.diffs_in_batch(0, 10, diff_options: diff_options).diff_paths).to eq([
'bar/branch-test.txt',
'custom-highlighting/test.gitlab-custom',
'encoding/iso8859.txt',
'files/images/wm.svg',
+ 'files/js/commit.js.coffee',
'files/js/commit.coffee',
'files/lfs/lfs_object.iso',
'files/ruby/popen.rb',
@@ -553,11 +555,12 @@ RSpec.describe MergeRequestDiff do
it 'sorts diff files directory first' do
diff_with_commits.update!(sorted: false) # Mark as unsorted so it'll re-order
- expect(diff_with_commits.diffs(diff_options).diff_file_paths).to eq([
+ expect(diff_with_commits.diffs(diff_options).diff_paths).to eq([
'bar/branch-test.txt',
'custom-highlighting/test.gitlab-custom',
'encoding/iso8859.txt',
'files/images/wm.svg',
+ 'files/js/commit.js.coffee',
'files/js/commit.coffee',
'files/lfs/lfs_object.iso',
'files/ruby/popen.rb',
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index edd543854cb..4a8a2909891 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -80,6 +80,24 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '.order_closed_at_asc' do
+ let_it_be(:older_mr) { create(:merge_request, :closed_last_month) }
+ let_it_be(:newer_mr) { create(:merge_request, :closed_last_month) }
+
+ it 'returns MRs ordered by closed_at ascending' do
+ expect(described_class.order_closed_at_asc).to eq([older_mr, newer_mr])
+ end
+ end
+
+ describe '.order_closed_at_desc' do
+ let_it_be(:older_mr) { create(:merge_request, :closed_last_month) }
+ let_it_be(:newer_mr) { create(:merge_request, :closed_last_month) }
+
+ it 'returns MRs ordered by closed_at descending' do
+ expect(described_class.order_closed_at_desc).to eq([newer_mr, older_mr])
+ end
+ end
+
describe '.with_jira_issue_keys' do
let_it_be(:mr_with_jira_title) { create(:merge_request, :unique_branches, title: 'Fix TEST-123') }
let_it_be(:mr_with_jira_description) { create(:merge_request, :unique_branches, description: 'this closes TEST-321') }
@@ -577,6 +595,26 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(merge_requests).to eq([newer_mr, older_mr])
end
end
+
+ context 'closed_at' do
+ let_it_be(:older_mr) { create(:merge_request, :closed_last_month) }
+ let_it_be(:newer_mr) { create(:merge_request, :closed_last_month) }
+
+ it 'sorts asc' do
+ merge_requests = described_class.sort_by_attribute(:closed_at_asc)
+ expect(merge_requests).to eq([older_mr, newer_mr])
+ end
+
+ it 'sorts desc' do
+ merge_requests = described_class.sort_by_attribute(:closed_at_desc)
+ expect(merge_requests).to eq([newer_mr, older_mr])
+ end
+
+ it 'sorts asc when it is closed_at' do
+ merge_requests = described_class.sort_by_attribute(:closed_at)
+ expect(merge_requests).to eq([older_mr, newer_mr])
+ end
+ end
end
describe 'time to merge calculations' do
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index bc592acc80f..f14b9c57eb1 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -538,6 +538,15 @@ RSpec.describe Milestone do
it { is_expected.to match('gitlab-org/gitlab-ce%123') }
it { is_expected.to match('gitlab-org/gitlab-ce%"my-milestone"') }
+
+ context 'when milestone_reference_pattern feature flag is false' do
+ before do
+ stub_feature_flags(milestone_reference_pattern: false)
+ end
+
+ it { is_expected.to match('gitlab-org/gitlab-ce%123') }
+ it { is_expected.to match('gitlab-org/gitlab-ce%"my-milestone"') }
+ end
end
describe '.link_reference_pattern' do
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index 14d28be8d43..e8ed6f1a460 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -41,6 +41,14 @@ RSpec.describe NamespaceSetting, type: :model do
it_behaves_like "doesn't return an error"
end
+
+ context "when it contains javascript tags" do
+ it "gets sanitized properly" do
+ namespace_settings.update!(default_branch_name: "hello<script>alert(1)</script>")
+
+ expect(namespace_settings.default_branch_name).to eq('hello')
+ end
+ end
end
describe '#allow_mfa_for_group' do
@@ -98,4 +106,81 @@ RSpec.describe NamespaceSetting, type: :model do
end
end
end
+
+ describe '#prevent_sharing_groups_outside_hierarchy' do
+ let(:settings) { create(:namespace_settings, prevent_sharing_groups_outside_hierarchy: true) }
+ let!(:group) { create(:group, parent: parent, namespace_settings: settings ) }
+
+ subject(:group_sharing_setting) { settings.prevent_sharing_groups_outside_hierarchy }
+
+ context 'when this namespace is a root ancestor' do
+ let(:parent) { nil }
+
+ it 'returns the actual stored value' do
+ expect(group_sharing_setting).to be_truthy
+ end
+ end
+
+ context 'when this namespace is a descendant' do
+ let(:parent) { create(:group) }
+
+ it 'returns the value stored for the parent settings' do
+ expect(group_sharing_setting).to eq(parent.namespace_settings.prevent_sharing_groups_outside_hierarchy)
+ expect(group_sharing_setting).to be_falsey
+ end
+ end
+ end
+
+ describe 'hooks related to group user cap update' do
+ let(:settings) { create(:namespace_settings, new_user_signups_cap: user_cap) }
+ let(:group) { create(:group, namespace_settings: settings) }
+
+ before do
+ allow(group).to receive(:root?).and_return(true)
+ end
+
+ context 'when updating a group with a user cap' do
+ let(:user_cap) { nil }
+
+ it 'also sets share_with_group_lock and prevent_sharing_groups_outside_hierarchy to true' do
+ expect(group.new_user_signups_cap).to be_nil
+ expect(group.share_with_group_lock).to be_falsey
+ expect(settings.prevent_sharing_groups_outside_hierarchy).to be_falsey
+
+ settings.update!(new_user_signups_cap: 10)
+ group.reload
+
+ expect(group.new_user_signups_cap).to eq(10)
+ expect(group.share_with_group_lock).to be_truthy
+ expect(settings.reload.prevent_sharing_groups_outside_hierarchy).to be_truthy
+ end
+
+ it 'has share_with_group_lock and prevent_sharing_groups_outside_hierarchy returning true for descendant groups' do
+ descendent = create(:group, parent: group)
+ desc_settings = descendent.namespace_settings
+
+ expect(descendent.share_with_group_lock).to be_falsey
+ expect(desc_settings.prevent_sharing_groups_outside_hierarchy).to be_falsey
+
+ settings.update!(new_user_signups_cap: 10)
+
+ expect(descendent.reload.share_with_group_lock).to be_truthy
+ expect(desc_settings.reload.prevent_sharing_groups_outside_hierarchy).to be_truthy
+ end
+ end
+
+ context 'when removing a user cap from namespace settings' do
+ let(:user_cap) { 10 }
+
+ it 'leaves share_with_group_lock and prevent_sharing_groups_outside_hierarchy set to true for the related group' do
+ expect(group.share_with_group_lock).to be_truthy
+ expect(settings.prevent_sharing_groups_outside_hierarchy).to be_truthy
+
+ settings.update!(new_user_signups_cap: nil)
+
+ expect(group.reload.share_with_group_lock).to be_truthy
+ expect(settings.reload.prevent_sharing_groups_outside_hierarchy).to be_truthy
+ end
+ end
+ end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index ea1ce067e4d..e2700378f5f 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe Namespace do
it { is_expected.to have_one :package_setting_relation }
it { is_expected.to have_one :onboarding_progress }
it { is_expected.to have_one :admin_note }
+ it { is_expected.to have_many :pending_builds }
end
describe 'validations' do
@@ -207,9 +208,23 @@ RSpec.describe Namespace do
it { is_expected.to include_module(Gitlab::VisibilityLevel) }
it { is_expected.to include_module(Namespaces::Traversal::Recursive) }
it { is_expected.to include_module(Namespaces::Traversal::Linear) }
+ it { is_expected.to include_module(Namespaces::Traversal::RecursiveScopes) }
+ it { is_expected.to include_module(Namespaces::Traversal::LinearScopes) }
end
- it_behaves_like 'linear namespace traversal'
+ context 'traversal scopes' do
+ context 'recursive' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ it_behaves_like 'namespace traversal scopes'
+ end
+
+ context 'linear' do
+ it_behaves_like 'namespace traversal scopes'
+ end
+ end
context 'traversal_ids on create' do
context 'default traversal_ids' do
@@ -1152,6 +1167,68 @@ RSpec.describe Namespace do
end
end
+ context 'refreshing project access on updating share_with_group_lock' do
+ let(:group) { create(:group, share_with_group_lock: false) }
+ let(:project) { create(:project, :private, group: group) }
+
+ let_it_be(:shared_with_group_one) { create(:group) }
+ let_it_be(:shared_with_group_two) { create(:group) }
+ let_it_be(:group_one_user) { create(:user) }
+ let_it_be(:group_two_user) { create(:user) }
+
+ subject(:execute_update) { group.update!(share_with_group_lock: true) }
+
+ before do
+ shared_with_group_one.add_developer(group_one_user)
+ shared_with_group_two.add_developer(group_two_user)
+ create(:project_group_link, group: shared_with_group_one, project: project)
+ create(:project_group_link, group: shared_with_group_two, project: project)
+ end
+
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(project.id).once
+
+ execute_update
+ end
+
+ it 'updates authorizations leading to users from shared groups losing access', :sidekiq_inline do
+ expect { execute_update }
+ .to change { group_one_user.authorized_projects.include?(project) }.from(true).to(false)
+ .and change { group_two_user.authorized_projects.include?(project) }.from(true).to(false)
+ end
+
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[group_one_user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
+
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[group_two_user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
+
+ execute_update
+ end
+
+ context 'when the feature flag `specialized_worker_for_group_lock_update_auth_recalculation` is disabled' do
+ before do
+ stub_feature_flags(specialized_worker_for_group_lock_update_auth_recalculation: false)
+ end
+
+ it 'refreshes the permissions of the members of the old and new namespace' do
+ expect { execute_update }
+ .to change { group_one_user.authorized_projects.include?(project) }.from(true).to(false)
+ .and change { group_two_user.authorized_projects.include?(project) }.from(true).to(false)
+ end
+ end
+ end
+
describe '#share_with_group_lock with subgroups' do
context 'when creating a subgroup' do
let(:subgroup) { create(:group, parent: root_group )}
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 2afe9a0f29b..0afdae2fc93 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -1538,4 +1538,24 @@ RSpec.describe Note do
expect(attachment).not_to be_exist
end
end
+
+ describe '#post_processed_cache_key' do
+ let(:note) { build(:note) }
+
+ it 'returns cache key and author cache key by default' do
+ expect(note.post_processed_cache_key).to eq("#{note.cache_key}:#{note.author.cache_key}")
+ end
+
+ context 'when note has redacted_note_html' do
+ let(:redacted_note_html) { 'redacted note html' }
+
+ before do
+ note.redacted_note_html = redacted_note_html
+ end
+
+ it 'returns cache key with redacted_note_html sha' do
+ expect(note.post_processed_cache_key).to eq("#{note.cache_key}:#{note.author.cache_key}:#{Digest::SHA1.hexdigest(redacted_note_html)}")
+ end
+ end
+ end
end
diff --git a/spec/models/operations/feature_flags/strategy_spec.rb b/spec/models/operations/feature_flags/strategy_spec.rb
index 0ecb49e75f3..9289e3beab5 100644
--- a/spec/models/operations/feature_flags/strategy_spec.rb
+++ b/spec/models/operations/feature_flags/strategy_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
context 'when the strategy name is flexibleRollout' do
- valid_parameters = { rollout: '40', groupId: 'mygroup', stickiness: 'DEFAULT' }
+ valid_parameters = { rollout: '40', groupId: 'mygroup', stickiness: 'default' }
where(invalid_parameters: [
nil,
{},
@@ -133,7 +133,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
[
[:rollout, '10'],
- [:stickiness, 'DEFAULT'],
+ [:stickiness, 'default'],
[:groupId, 'mygroup']
].permutation(3).each do |parameters|
it "allows the parameters in the order #{parameters.map { |p| p.first }.join(', ')}" do
@@ -151,7 +151,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
"\n", "\t", "\n10", "20\n", "\n100", "100\n", "\n ", nil])
with_them do
it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
- parameters = { stickiness: 'DEFAULT', groupId: 'mygroup', rollout: invalid_value }
+ parameters = { stickiness: 'default', groupId: 'mygroup', rollout: invalid_value }
strategy = described_class.create(feature_flag: feature_flag,
name: 'flexibleRollout',
parameters: parameters)
@@ -165,7 +165,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
where(valid_value: %w[0 1 10 38 100 93])
with_them do
it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
- parameters = { stickiness: 'DEFAULT', groupId: 'mygroup', rollout: valid_value }
+ parameters = { stickiness: 'default', groupId: 'mygroup', rollout: valid_value }
strategy = described_class.create(feature_flag: feature_flag,
name: 'flexibleRollout',
parameters: parameters)
@@ -180,7 +180,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
'!bad', '.bad', 'Bad', 'bad1', "", " ", "b" * 33, "ba_d", "ba\nd"])
with_them do
it 'must be a string value of up to 32 lowercase characters' do
- parameters = { stickiness: 'DEFAULT', groupId: invalid_value, rollout: '40' }
+ parameters = { stickiness: 'default', groupId: invalid_value, rollout: '40' }
strategy = described_class.create(feature_flag: feature_flag,
name: 'flexibleRollout',
parameters: parameters)
@@ -192,7 +192,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
where(valid_value: ["somegroup", "anothergroup", "okay", "g", "a" * 32])
with_them do
it 'must be a string value of up to 32 lowercase characters' do
- parameters = { stickiness: 'DEFAULT', groupId: valid_value, rollout: '40' }
+ parameters = { stickiness: 'default', groupId: valid_value, rollout: '40' }
strategy = described_class.create(feature_flag: feature_flag,
name: 'flexibleRollout',
parameters: parameters)
@@ -203,7 +203,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
describe 'stickiness' do
- where(invalid_value: [nil, " ", "default", "DEFAULT\n", "UserId", "USER", "USERID "])
+ where(invalid_value: [nil, " ", "DEFAULT", "DEFAULT\n", "UserId", "USER", "USERID "])
with_them do
it 'must be a string representing a supported stickiness setting' do
parameters = { stickiness: invalid_value, groupId: 'mygroup', rollout: '40' }
@@ -212,12 +212,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
parameters: parameters)
expect(strategy.errors[:parameters]).to eq([
- 'stickiness parameter must be DEFAULT, USERID, SESSIONID, or RANDOM'
+ 'stickiness parameter must be default, userId, sessionId, or random'
])
end
end
- where(valid_value: %w[DEFAULT USERID SESSIONID RANDOM])
+ where(valid_value: %w[default userId sessionId random])
with_them do
it 'must be a string representing a supported stickiness setting' do
parameters = { stickiness: valid_value, groupId: 'mygroup', rollout: '40' }
@@ -425,7 +425,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
user_list: user_list,
parameters: { groupId: 'default',
rollout: '10',
- stickiness: 'DEFAULT' })
+ stickiness: 'default' })
expect(strategy.errors[:user_list]).to eq(['must be blank'])
end
@@ -435,7 +435,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
name: 'flexibleRollout',
parameters: { groupId: 'default',
rollout: '10',
- stickiness: 'DEFAULT' })
+ stickiness: 'default' })
expect(strategy.errors[:user_list]).to be_empty
end
diff --git a/spec/models/packages/npm_spec.rb b/spec/models/packages/npm_spec.rb
new file mode 100644
index 00000000000..fa4adadfe06
--- /dev/null
+++ b/spec/models/packages/npm_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Npm do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '.scope_of' do
+ subject { described_class.scope_of(package_name) }
+
+ where(:package_name, :expected_result) do
+ nil | nil
+ 'test' | nil
+ '@test' | nil
+ 'test/package' | nil
+ '@/package' | nil
+ '@test/package' | 'test'
+ '@test/' | nil
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_result) }
+ end
+ end
+end
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index ee0aeb26d50..90910fcb7ce 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -159,4 +159,71 @@ RSpec.describe Packages::PackageFile, type: :model do
expect { subject }.to change { package_file.size }.from(nil).to(3513)
end
end
+
+ context 'update callbacks' do
+ subject { package_file.save! }
+
+ shared_examples 'executing the default callback' do
+ it 'executes the default callback' do
+ expect(package_file).to receive(:remove_previously_stored_file)
+ expect(package_file).not_to receive(:move_in_object_storage)
+
+ subject
+ end
+ end
+
+ context 'with object storage disabled' do
+ let(:package_file) { create(:package_file, file_name: 'file_name.txt') }
+
+ before do
+ stub_package_file_object_storage(enabled: false)
+ end
+
+ it_behaves_like 'executing the default callback'
+
+ context 'with new_file_path set' do
+ before do
+ package_file.new_file_path = 'test'
+ end
+
+ it_behaves_like 'executing the default callback'
+ end
+ end
+
+ context 'with object storage enabled' do
+ let(:package_file) do
+ create(
+ :package_file,
+ file_name: 'file_name.txt',
+ file: CarrierWaveStringFile.new_file(
+ file_content: 'content',
+ filename: 'file_name.txt',
+ content_type: 'text/plain'
+ ),
+ file_store: ::Packages::PackageFileUploader::Store::REMOTE
+ )
+ end
+
+ before do
+ stub_package_file_object_storage(enabled: true)
+ end
+
+ it_behaves_like 'executing the default callback'
+
+ context 'with new_file_path set' do
+ before do
+ package_file.new_file_path = 'test'
+ end
+
+ it 'executes the move_in_object_storage callback' do
+ expect(package_file).not_to receive(:remove_previously_stored_file)
+ expect(package_file).to receive(:move_in_object_storage).and_call_original
+ expect(package_file.file.file).to receive(:copy_to).and_call_original
+ expect(package_file.file.file).to receive(:delete).and_call_original
+
+ subject
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 449e30f9fb7..4d4d4ad4fa9 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe Packages::Package, type: :model do
include SortingHelper
+ using RSpec::Parameterized::TableSyntax
it_behaves_like 'having unique enum values'
@@ -418,7 +419,7 @@ RSpec.describe Packages::Package, type: :model do
end
end
- describe '#package_already_taken' do
+ describe '#npm_package_already_taken' do
context 'maven package' do
let!(:package) { create(:maven_package) }
@@ -428,6 +429,164 @@ RSpec.describe Packages::Package, type: :model do
expect(new_package).to be_valid
end
end
+
+ context 'npm package' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:second_project) { create(:project, namespace: group)}
+
+ let(:package) { build(:npm_package, project: project, name: name) }
+
+ shared_examples 'validating the first package' do
+ it 'validates the first package' do
+ expect(package).to be_valid
+ end
+ end
+
+ shared_examples 'validating the second package' do
+ it 'validates the second package' do
+ package.save!
+
+ expect(second_package).to be_valid
+ end
+ end
+
+ shared_examples 'not validating the second package' do |field_with_error:|
+ it 'does not validate the second package' do
+ package.save!
+
+ expect(second_package).not_to be_valid
+ case field_with_error
+ when :base
+ expect(second_package.errors.messages[:base]).to eq ['Package already exists']
+ when :name
+ expect(second_package.errors.messages[:name]).to eq ['has already been taken']
+ else
+ raise ArgumentError, "field #{field_with_error} not expected"
+ end
+ end
+ end
+
+ context 'following the naming convention' do
+ let(:name) { "@#{group.path}/test" }
+
+ context 'with the second package in the project of the first package' do
+ let(:second_package) { build(:npm_package, project: project, name: second_package_name, version: second_package_version) }
+
+ context 'with no duplicated name' do
+ let(:second_package_name) { "@#{group.path}/test2" }
+ let(:second_package_version) { '5.0.0' }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'validating the second package'
+ end
+
+ context 'with duplicated name' do
+ let(:second_package_name) { package.name }
+ let(:second_package_version) { '5.0.0' }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'validating the second package'
+ end
+
+ context 'with duplicate name and duplicated version' do
+ let(:second_package_name) { package.name }
+ let(:second_package_version) { package.version }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'not validating the second package', field_with_error: :name
+ end
+ end
+
+ context 'with the second package in a different project than the first package' do
+ let(:second_package) { build(:npm_package, project: second_project, name: second_package_name, version: second_package_version) }
+
+ context 'with no duplicated name' do
+ let(:second_package_name) { "@#{group.path}/test2" }
+ let(:second_package_version) { '5.0.0' }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'validating the second package'
+ end
+
+ context 'with duplicated name' do
+ let(:second_package_name) { package.name }
+ let(:second_package_version) { '5.0.0' }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'validating the second package'
+ end
+
+ context 'with duplicate name and duplicated version' do
+ let(:second_package_name) { package.name }
+ let(:second_package_version) { package.version }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'not validating the second package', field_with_error: :base
+ end
+ end
+ end
+
+ context 'not following the naming convention' do
+ let(:name) { '@foobar/test' }
+
+ context 'with the second package in the project of the first package' do
+ let(:second_package) { build(:npm_package, project: project, name: second_package_name, version: second_package_version) }
+
+ context 'with no duplicated name' do
+ let(:second_package_name) { "@foobar/test2" }
+ let(:second_package_version) { '5.0.0' }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'validating the second package'
+ end
+
+ context 'with duplicated name' do
+ let(:second_package_name) { package.name }
+ let(:second_package_version) { '5.0.0' }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'validating the second package'
+ end
+
+ context 'with duplicate name and duplicated version' do
+ let(:second_package_name) { package.name }
+ let(:second_package_version) { package.version }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'not validating the second package', field_with_error: :name
+ end
+ end
+
+ context 'with the second package in a different project than the first package' do
+ let(:second_package) { build(:npm_package, project: second_project, name: second_package_name, version: second_package_version) }
+
+ context 'with no duplicated name' do
+ let(:second_package_name) { "@foobar/test2" }
+ let(:second_package_version) { '5.0.0' }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'validating the second package'
+ end
+
+ context 'with duplicated name' do
+ let(:second_package_name) { package.name }
+ let(:second_package_version) { '5.0.0' }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'validating the second package'
+ end
+
+ context 'with duplicate name and duplicated version' do
+ let(:second_package_name) { package.name }
+ let(:second_package_version) { package.version }
+
+ it_behaves_like 'validating the first package'
+ it_behaves_like 'validating the second package'
+ end
+ end
+ end
+ end
end
context "recipe uniqueness for conan packages" do
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index 67ecbe13c1a..8cd831d2f85 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -73,6 +73,14 @@ RSpec.describe PersonalAccessToken do
end
end
+ describe '#expired_but_not_enforced?' do
+ let(:token) { build(:personal_access_token) }
+
+ it 'returns false', :aggregate_failures do
+ expect(token).not_to be_expired_but_not_enforced
+ end
+ end
+
describe 'Redis storage' do
let(:user_id) { 123 }
let(:token) { 'KS3wegQYXBLYhQsciwsj' }
diff --git a/spec/models/postgresql/detached_partition_spec.rb b/spec/models/postgresql/detached_partition_spec.rb
new file mode 100644
index 00000000000..aaa99e842b4
--- /dev/null
+++ b/spec/models/postgresql/detached_partition_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Postgresql::DetachedPartition do
+ describe '#ready_to_drop' do
+ let_it_be(:drop_before) { Postgresql::DetachedPartition.create!(drop_after: 1.day.ago, table_name: 'old_table') }
+ let_it_be(:drop_after) { Postgresql::DetachedPartition.create!(drop_after: 1.day.from_now, table_name: 'new_table') }
+
+ it 'includes partitions that should be dropped before now' do
+ expect(Postgresql::DetachedPartition.ready_to_drop.to_a).to include(drop_before)
+ end
+
+ it 'does not include partitions that should be dropped after now' do
+ expect(Postgresql::DetachedPartition.ready_to_drop.to_a).not_to include(drop_after)
+ end
+ end
+end
diff --git a/spec/models/postgresql/replication_slot_spec.rb b/spec/models/postgresql/replication_slot_spec.rb
index 4bad8a3f0c0..c3b67a2e7b8 100644
--- a/spec/models/postgresql/replication_slot_spec.rb
+++ b/spec/models/postgresql/replication_slot_spec.rb
@@ -60,4 +60,71 @@ RSpec.describe Postgresql::ReplicationSlot do
expect(described_class.lag_too_great?).to eq(false)
end
end
+
+ describe '#max_replication_slots' do
+ it 'returns the maximum number of replication slots' do
+ expect(described_class.max_replication_slots).to be >= 0
+ end
+ end
+
+ context 'with enough slots available' do
+ skip_examples = described_class.max_replication_slots <= described_class.count
+
+ before(:all) do
+ skip('max_replication_slots too small') if skip_examples
+
+ @current_slot_count = ApplicationRecord
+ .connection
+ .execute("SELECT COUNT(*) FROM pg_replication_slots;")
+ .first
+ .fetch('count')
+ .to_i
+
+ @current_unused_count = ApplicationRecord
+ .connection
+ .execute("SELECT COUNT(*) FROM pg_replication_slots WHERE active = 'f';")
+ .first
+ .fetch('count')
+ .to_i
+
+ ApplicationRecord
+ .connection
+ .execute("SELECT * FROM pg_create_physical_replication_slot('test_slot');")
+ end
+
+ after(:all) do
+ unless skip_examples
+ ApplicationRecord
+ .connection
+ .execute("SELECT pg_drop_replication_slot('test_slot');")
+ end
+ end
+
+ describe '#slots_count' do
+ it 'returns the number of replication slots' do
+ expect(described_class.count).to eq(@current_slot_count + 1)
+ end
+ end
+
+ describe '#unused_slots_count' do
+ it 'returns the number of unused replication slots' do
+ expect(described_class.unused_slots_count).to eq(@current_unused_count + 1)
+ end
+ end
+
+ describe '#max_retained_wal' do
+ it 'returns the retained WAL size' do
+ expect(described_class.max_retained_wal).not_to be_nil
+ end
+ end
+
+ describe '#slots_retained_bytes' do
+ it 'returns the number of retained bytes' do
+ slot = described_class.slots_retained_bytes.find {|x| x['slot_name'] == 'test_slot' }
+
+ expect(slot).not_to be_nil
+ expect(slot['retained_bytes']).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index 3fd7e57a5db..5f720f8c4f8 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe ProjectFeature do
let(:project) { create(:project) }
let(:user) { create(:user) }
+ it { is_expected.to belong_to(:project) }
+
describe 'PRIVATE_FEATURES_MIN_ACCESS_LEVEL_FOR_PRIVATE_PROJECT' do
it 'has higher level than that of PRIVATE_FEATURES_MIN_ACCESS_LEVEL' do
described_class::PRIVATE_FEATURES_MIN_ACCESS_LEVEL_FOR_PRIVATE_PROJECT.each do |feature, level|
@@ -189,27 +191,57 @@ RSpec.describe ProjectFeature do
end
describe 'container_registry_access_level' do
- context 'when the project is created with container_registry_enabled false' do
- it 'creates project with DISABLED container_registry_access_level' do
- project = create(:project, container_registry_enabled: false)
+ context 'with default value' do
+ let(:project) { Project.new }
+
+ context 'when the default is false' do
+ it 'creates project_feature with `disabled` container_registry_access_level' do
+ stub_config_setting(default_projects_features: { container_registry: false })
- expect(project.project_feature.container_registry_access_level).to eq(described_class::DISABLED)
+ expect(project.project_feature.container_registry_access_level).to eq(described_class::DISABLED)
+ end
end
- end
- context 'when the project is created with container_registry_enabled true' do
- it 'creates project with ENABLED container_registry_access_level' do
- project = create(:project, container_registry_enabled: true)
+ context 'when the default is true' do
+ before do
+ stub_config_setting(default_projects_features: { container_registry: true })
+ end
- expect(project.project_feature.container_registry_access_level).to eq(described_class::ENABLED)
+ it 'creates project_feature with `enabled` container_registry_access_level' do
+ expect(project.project_feature.container_registry_access_level).to eq(described_class::ENABLED)
+ end
+ end
+
+ context 'when the default is nil' do
+ it 'creates project_feature with `disabled` container_registry_access_level' do
+ stub_config_setting(default_projects_features: { container_registry: nil })
+
+ expect(project.project_feature.container_registry_access_level).to eq(described_class::DISABLED)
+ end
end
end
- context 'when the project is created with container_registry_enabled nil' do
- it 'creates project with DISABLED container_registry_access_level' do
- project = create(:project, container_registry_enabled: nil)
+ context 'test build factory' do
+ let(:project) { build(:project, container_registry_access_level: level) }
+
+ subject { project.container_registry_access_level }
+
+ context 'private' do
+ let(:level) { ProjectFeature::PRIVATE }
+
+ it { is_expected.to eq(level) }
+ end
+
+ context 'enabled' do
+ let(:level) { ProjectFeature::ENABLED }
+
+ it { is_expected.to eq(level) }
+ end
+
+ context 'disabled' do
+ let(:level) { ProjectFeature::DISABLED }
- expect(project.project_feature.container_registry_access_level).to eq(described_class::DISABLED)
+ it { is_expected.to eq(level) }
end
end
end
diff --git a/spec/models/project_feature_usage_spec.rb b/spec/models/project_feature_usage_spec.rb
index 6ef407432b0..698c5374e88 100644
--- a/spec/models/project_feature_usage_spec.rb
+++ b/spec/models/project_feature_usage_spec.rb
@@ -128,19 +128,15 @@ RSpec.describe ProjectFeatureUsage, type: :model do
end
context 'ProjectFeatureUsage with DB Load Balancing', :request_store do
- include_context 'clear DB Load Balancing configuration'
-
describe '#log_jira_dvcs_integration_usage' do
let!(:project) { create(:project) }
subject { project.feature_usage }
- context 'database load balancing is configured' do
+ context 'database load balancing is configured', :db_load_balancing do
before do
- # Do not pollute AR for other tests, but rather simulate effect of configure_proxy.
- allow(ActiveRecord::Base.singleton_class).to receive(:prepend)
- ::Gitlab::Database::LoadBalancing.configure_proxy
allow(ActiveRecord::Base).to receive(:connection).and_return(::Gitlab::Database::LoadBalancing.proxy)
+
::Gitlab::Database::LoadBalancing::Session.clear_session
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index efa269cdb5c..d8f3a63d221 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Project, factory_default: :keep do
include ProjectForksHelper
include GitHelpers
include ExternalAuthorizationServiceHelpers
+ include ReloadHelpers
using RSpec::Parameterized::TableSyntax
let_it_be(:namespace) { create_default(:namespace).freeze }
@@ -86,7 +87,6 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:ci_pipelines) }
it { is_expected.to have_many(:ci_refs) }
it { is_expected.to have_many(:builds) }
- it { is_expected.to have_many(:build_trace_section_names)}
it { is_expected.to have_many(:build_report_results) }
it { is_expected.to have_many(:runner_projects) }
it { is_expected.to have_many(:runners) }
@@ -135,6 +135,8 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:pipeline_artifacts) }
it { is_expected.to have_many(:terraform_states).class_name('Terraform::State').inverse_of(:project) }
it { is_expected.to have_many(:timelogs) }
+ it { is_expected.to have_many(:error_tracking_errors).class_name('ErrorTracking::Error') }
+ it { is_expected.to have_many(:error_tracking_client_keys).class_name('ErrorTracking::ClientKey') }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -317,7 +319,8 @@ RSpec.describe Project, factory_default: :keep do
end
it 'validates presence of project_feature' do
- project = build(:project, project_feature: nil)
+ project = build(:project)
+ project.project_feature = nil
expect(project).not_to be_valid
end
@@ -654,7 +657,6 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:root_ancestor).to(:namespace).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:last_pipeline).to(:commit).with_arguments(allow_nil: true) }
- it { is_expected.to delegate_method(:allow_editing_commit_messages?).to(:project_setting) }
it { is_expected.to delegate_method(:container_registry_enabled?).to(:project_feature) }
it { is_expected.to delegate_method(:container_registry_access_level).to(:project_feature) }
@@ -825,8 +827,6 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#merge_method' do
- using RSpec::Parameterized::TableSyntax
-
where(:ff, :rebase, :method) do
true | true | :ff
true | false | :ff
@@ -1485,33 +1485,21 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '.with_active_jira_integrations' do
- it 'returns the correct integrations' do
- active_jira_integration = create(:jira_integration)
- active_service = create(:service, active: true)
-
- expect(described_class.with_active_jira_integrations).to include(active_jira_integration.project)
- expect(described_class.with_active_jira_integrations).not_to include(active_service.project)
- end
- end
-
describe '.with_jira_dvcs_cloud' do
it 'returns the correct project' do
jira_dvcs_cloud_project = create(:project, :jira_dvcs_cloud)
- jira_dvcs_server_project = create(:project, :jira_dvcs_server)
+ create(:project, :jira_dvcs_server)
- expect(described_class.with_jira_dvcs_cloud).to include(jira_dvcs_cloud_project)
- expect(described_class.with_jira_dvcs_cloud).not_to include(jira_dvcs_server_project)
+ expect(described_class.with_jira_dvcs_cloud).to contain_exactly(jira_dvcs_cloud_project)
end
end
describe '.with_jira_dvcs_server' do
it 'returns the correct project' do
jira_dvcs_server_project = create(:project, :jira_dvcs_server)
- jira_dvcs_cloud_project = create(:project, :jira_dvcs_cloud)
+ create(:project, :jira_dvcs_cloud)
- expect(described_class.with_jira_dvcs_server).to include(jira_dvcs_server_project)
- expect(described_class.with_jira_dvcs_server).not_to include(jira_dvcs_cloud_project)
+ expect(described_class.with_jira_dvcs_server).to contain_exactly(jira_dvcs_server_project)
end
end
@@ -1597,15 +1585,39 @@ RSpec.describe Project, factory_default: :keep do
end
describe '.with_integration' do
- before do
- create_list(:prometheus_project, 2)
+ it 'returns the correct projects' do
+ active_confluence_integration = create(:confluence_integration)
+ inactive_confluence_integration = create(:confluence_integration, active: false)
+ create(:bugzilla_integration)
+
+ expect(described_class.with_integration(::Integrations::Confluence)).to contain_exactly(
+ active_confluence_integration.project,
+ inactive_confluence_integration.project
+ )
end
+ end
- let(:integration) { :prometheus_integration }
+ describe '.with_active_integration' do
+ it 'returns the correct projects' do
+ active_confluence_integration = create(:confluence_integration)
+ create(:confluence_integration, active: false)
+ create(:bugzilla_integration, active: true)
- it 'avoids n + 1' do
- expect { described_class.with_integration(integration).map(&integration) }
- .not_to exceed_query_limit(1)
+ expect(described_class.with_active_integration(::Integrations::Confluence)).to contain_exactly(
+ active_confluence_integration.project
+ )
+ end
+ end
+
+ describe '.include_integration' do
+ it 'avoids n + 1', :aggregate_failures do
+ create(:prometheus_integration)
+ run_test = -> { described_class.include_integration(:prometheus_integration).map(&:prometheus_integration) }
+ control_count = ActiveRecord::QueryRecorder.new { run_test.call }
+ create(:prometheus_integration)
+
+ expect(run_test.call.count).to eq(2)
+ expect { run_test.call }.not_to exceed_query_limit(control_count)
end
end
@@ -1938,8 +1950,6 @@ RSpec.describe Project, factory_default: :keep do
end
context 'when set to INTERNAL in application settings' do
- using RSpec::Parameterized::TableSyntax
-
before do
stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
end
@@ -2000,8 +2010,6 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#default_branch_protected?' do
- using RSpec::Parameterized::TableSyntax
-
let_it_be(:namespace) { create(:namespace) }
let_it_be(:project) { create(:project, namespace: namespace) }
@@ -2405,43 +2413,24 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#set_container_registry_access_level' do
+ describe '#container_registry_enabled=' do
let_it_be_with_reload(:project) { create(:project) }
it 'updates project_feature', :aggregate_failures do
- # Simulate an existing project that has container_registry enabled
- project.update_column(:container_registry_enabled, true)
- project.project_feature.update_column(:container_registry_access_level, ProjectFeature::ENABLED)
-
project.update!(container_registry_enabled: false)
- expect(project.read_attribute(:container_registry_enabled)).to eq(false)
expect(project.project_feature.container_registry_access_level).to eq(ProjectFeature::DISABLED)
project.update!(container_registry_enabled: true)
- expect(project.read_attribute(:container_registry_enabled)).to eq(true)
expect(project.project_feature.container_registry_access_level).to eq(ProjectFeature::ENABLED)
end
-
- it 'rollsback both projects and project_features row in case of error', :aggregate_failures do
- project.update_column(:container_registry_enabled, true)
- project.project_feature.update_column(:container_registry_access_level, ProjectFeature::ENABLED)
-
- allow(project).to receive(:valid?).and_return(false)
-
- expect { project.update!(container_registry_enabled: false) }.to raise_error(ActiveRecord::RecordInvalid)
-
- expect(project.reload.read_attribute(:container_registry_enabled)).to eq(true)
- expect(project.project_feature.reload.container_registry_access_level).to eq(ProjectFeature::ENABLED)
- end
end
describe '#container_registry_enabled' do
let_it_be_with_reload(:project) { create(:project) }
it 'delegates to project_feature', :aggregate_failures do
- project.update_column(:container_registry_enabled, true)
project.project_feature.update_column(:container_registry_access_level, ProjectFeature::DISABLED)
expect(project.container_registry_enabled).to eq(false)
@@ -2870,6 +2859,36 @@ RSpec.describe Project, factory_default: :keep do
it { expect(project.import?).to be true }
end
+ describe '#github_import?' do
+ let_it_be(:project) { build(:project, import_type: 'github') }
+
+ it { expect(project.github_import?).to be true }
+ end
+
+ describe '#github_enterprise_import?' do
+ let_it_be(:github_com_project) do
+ build(
+ :project,
+ import_type: 'github',
+ import_url: 'https://api.github.com/user/repo'
+ )
+ end
+
+ let_it_be(:github_enterprise_project) do
+ build(
+ :project,
+ import_type: 'github',
+ import_url: 'https://othergithub.net/user/repo'
+ )
+ end
+
+ it { expect(github_com_project.github_import?).to be true }
+ it { expect(github_com_project.github_enterprise_import?).to be false }
+
+ it { expect(github_enterprise_project.github_import?).to be true }
+ it { expect(github_enterprise_project.github_enterprise_import?).to be true }
+ end
+
describe '#remove_import_data' do
let(:import_data) { ProjectImportData.new(data: { 'test' => 'some data' }) }
@@ -2912,10 +2931,6 @@ RSpec.describe Project, factory_default: :keep do
subject { project.has_remote_mirror? }
- before do
- allow_any_instance_of(RemoteMirror).to receive(:refresh_remote)
- end
-
it 'returns true when a remote mirror is enabled' do
is_expected.to be_truthy
end
@@ -2932,10 +2947,6 @@ RSpec.describe Project, factory_default: :keep do
delegate :update_remote_mirrors, to: :project
- before do
- allow_any_instance_of(RemoteMirror).to receive(:refresh_remote)
- end
-
it 'syncs enabled remote mirror' do
expect_any_instance_of(RemoteMirror).to receive(:sync)
@@ -3011,29 +3022,106 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#ancestors_upto' do
- let_it_be(:parent) { create(:group) }
- let_it_be(:child) { create(:group, parent: parent) }
- let_it_be(:child2) { create(:group, parent: child) }
- let_it_be(:project) { create(:project, namespace: child2) }
+ shared_context 'project with group ancestry' do
+ let(:parent) { create(:group) }
+ let(:child) { create(:group, parent: parent) }
+ let(:child2) { create(:group, parent: child) }
+ let(:project) { create(:project, namespace: child2) }
- it 'returns all ancestors when no namespace is given' do
- expect(project.ancestors_upto).to contain_exactly(child2, child, parent)
+ before do
+ reload_models(parent, child, child2)
end
+ end
+
+ shared_context 'project with namespace ancestry' do
+ let(:namespace) { create :namespace }
+ let(:project) { create :project, namespace: namespace }
+ end
+
+ shared_examples 'project with group ancestors' do
+ it 'returns all ancestors' do
+ is_expected.to contain_exactly(child2, child, parent)
+ end
+ end
- it 'includes ancestors upto but excluding the given ancestor' do
- expect(project.ancestors_upto(parent)).to contain_exactly(child2, child)
+ shared_examples 'project with ordered group ancestors' do
+ let(:hierarchy_order) { :desc }
+
+ it 'returns ancestors ordered by descending hierarchy' do
+ is_expected.to eq([parent, child, child2])
end
+ end
- describe 'with hierarchy_order' do
- it 'returns ancestors ordered by descending hierarchy' do
- expect(project.ancestors_upto(hierarchy_order: :desc)).to eq([parent, child, child2])
+ shared_examples '#ancestors' do
+ context 'group ancestory' do
+ include_context 'project with group ancestry'
+
+ it_behaves_like 'project with group ancestors' do
+ subject { project.ancestors }
end
- it 'can be used with upto option' do
- expect(project.ancestors_upto(parent, hierarchy_order: :desc)).to eq([child, child2])
+ it_behaves_like 'project with ordered group ancestors' do
+ subject { project.ancestors(hierarchy_order: hierarchy_order) }
end
end
+
+ context 'namespace ancestry' do
+ include_context 'project with namespace ancestry'
+
+ subject { project.ancestors }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ describe '#ancestors' do
+ context 'with linear_project_ancestors feature flag enabled' do
+ before do
+ stub_feature_flags(linear_project_ancestors: true)
+ end
+
+ include_examples '#ancestors'
+ end
+
+ context 'with linear_project_ancestors feature flag disabled' do
+ before do
+ stub_feature_flags(linear_project_ancestors: false)
+ end
+
+ include_examples '#ancestors'
+ end
+ end
+
+ describe '#ancestors_upto' do
+ context 'group ancestry' do
+ include_context 'project with group ancestry'
+
+ it_behaves_like 'project with group ancestors' do
+ subject { project.ancestors_upto }
+ end
+
+ it_behaves_like 'project with ordered group ancestors' do
+ subject { project.ancestors_upto(hierarchy_order: hierarchy_order) }
+ end
+
+ it 'includes ancestors upto but excluding the given ancestor' do
+ expect(project.ancestors_upto(parent)).to contain_exactly(child2, child)
+ end
+
+ describe 'with hierarchy_order' do
+ it 'can be used with upto option' do
+ expect(project.ancestors_upto(parent, hierarchy_order: :desc)).to eq([child, child2])
+ end
+ end
+ end
+
+ context 'namespace ancestry' do
+ include_context 'project with namespace ancestry'
+
+ subject { project.ancestors_upto }
+
+ it { is_expected.to be_empty }
+ end
end
describe '#root_ancestor' do
@@ -5194,11 +5282,26 @@ RSpec.describe Project, factory_default: :keep do
expect(InternalId).to receive(:flush_records!).with(project: project)
expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:repository_size])
expect(DetectRepositoryLanguagesWorker).to receive(:perform_async).with(project.id)
- expect(project).to receive(:write_repository_config)
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:perform_async).with(project.id)
+ expect(project).to receive(:set_full_path)
project.after_import
end
+ context 'project authorizations refresh' do
+ it 'updates user authorizations' do
+ create(:import_state, :started, project: project)
+
+ member = build(:project_member, project: project)
+ member.importing = true
+ member.save!
+
+ Sidekiq::Testing.inline! { project.after_import }
+
+ expect(member.user.authorized_project?(project)).to be true
+ end
+ end
+
context 'branch protection' do
let_it_be(:namespace) { create(:namespace) }
@@ -5263,25 +5366,25 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#write_repository_config' do
+ describe '#set_full_path' do
let_it_be(:project) { create(:project, :repository) }
it 'writes full path in .git/config when key is missing' do
- project.write_repository_config
+ project.set_full_path
expect(rugged_config['gitlab.fullpath']).to eq project.full_path
end
it 'updates full path in .git/config when key is present' do
- project.write_repository_config(gl_full_path: 'old/path')
+ project.set_full_path(gl_full_path: 'old/path')
- expect { project.write_repository_config }.to change { rugged_config['gitlab.fullpath'] }.from('old/path').to(project.full_path)
+ expect { project.set_full_path }.to change { rugged_config['gitlab.fullpath'] }.from('old/path').to(project.full_path)
end
it 'does not raise an error with an empty repository' do
project = create(:project_empty_repo)
- expect { project.write_repository_config }.not_to raise_error
+ expect { project.set_full_path }.not_to raise_error
end
end
@@ -5911,10 +6014,9 @@ RSpec.describe Project, factory_default: :keep do
end
end
- context 'with an instance-level and template integrations' do
+ context 'with an instance-level integration' do
before do
create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/')
- create(:prometheus_integration, :template, api_url: 'https://prometheus.template.com/')
end
it 'builds the integration from the instance integration' do
@@ -5922,17 +6024,7 @@ RSpec.describe Project, factory_default: :keep do
end
end
- context 'with a template integration and no instance-level' do
- before do
- create(:prometheus_integration, :template, api_url: 'https://prometheus.template.com/')
- end
-
- it 'builds the integration from the template' do
- expect(subject.find_or_initialize_integration('prometheus').api_url).to eq('https://prometheus.template.com/')
- end
- end
-
- context 'without an exisiting integration, or instance-level or template' do
+ context 'without an existing integration or instance-level' do
it 'builds the integration' do
expect(subject.find_or_initialize_integration('prometheus')).to be_a(::Integrations::Prometheus)
expect(subject.find_or_initialize_integration('prometheus').api_url).to be_nil
@@ -6834,28 +6926,46 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#package_already_taken?' do
- let(:namespace) { create(:namespace) }
- let(:project) { create(:project, :public, namespace: namespace) }
- let!(:package) { create(:npm_package, project: project, name: "@#{namespace.path}/foo") }
+ let_it_be(:namespace) { create(:namespace, path: 'test') }
+ let_it_be(:project) { create(:project, :public, namespace: namespace) }
+ let_it_be(:package) { create(:npm_package, project: project, name: "@#{namespace.path}/foo", version: '1.2.3') }
- context 'no package exists with the same name' do
- it 'returns false' do
- result = project.package_already_taken?("@#{namespace.path}/bar")
- expect(result).to be false
+ subject { project.package_already_taken?(package_name, package_version, package_type: :npm) }
+
+ context 'within the package project' do
+ where(:package_name, :package_version, :expected_result) do
+ '@test/bar' | '1.2.3' | false
+ '@test/bar' | '5.5.5' | false
+ '@test/foo' | '1.2.3' | false
+ '@test/foo' | '5.5.5' | false
end
- it 'returns false if it is the project that the package belongs to' do
- result = project.package_already_taken?("@#{namespace.path}/foo")
- expect(result).to be false
+ with_them do
+ it { is_expected.to eq expected_result}
end
end
- context 'a package already exists with the same name' do
- let(:alt_project) { create(:project, :public, namespace: namespace) }
+ context 'within a different project' do
+ let_it_be(:alt_project) { create(:project, :public, namespace: namespace) }
- it 'returns true' do
- result = alt_project.package_already_taken?("@#{namespace.path}/foo")
- expect(result).to be true
+ subject { alt_project.package_already_taken?(package_name, package_version, package_type: :npm) }
+
+ where(:package_name, :package_version, :expected_result) do
+ '@test/bar' | '1.2.3' | false
+ '@test/bar' | '5.5.5' | false
+ '@test/foo' | '1.2.3' | true
+ '@test/foo' | '5.5.5' | false
+ end
+
+ with_them do
+ it { is_expected.to eq expected_result}
+ end
+
+ context 'for a different package type' do
+ it 'returns false' do
+ result = alt_project.package_already_taken?(package.name, package.version, package_type: :nuget)
+ expect(result).to be false
+ end
end
end
end
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index cb1baa02e96..ba769e830fd 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -401,12 +401,6 @@ RSpec.describe ProjectStatistics do
let(:stat) { :build_artifacts_size }
it_behaves_like 'a statistic that increases storage_size asynchronously'
-
- it_behaves_like 'a statistic that increases storage_size' do
- before do
- stub_feature_flags(efficient_counter_attribute: false)
- end
- end
end
context 'when adjusting :pipeline_artifacts_size' do
diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb
index ce75e68de32..8eab50abd8c 100644
--- a/spec/models/project_team_spec.rb
+++ b/spec/models/project_team_spec.rb
@@ -193,6 +193,36 @@ RSpec.describe ProjectTeam do
end
end
+ describe '#members_with_access_levels' do
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: maintainer.namespace) }
+ let_it_be(:access_levels) { [Gitlab::Access::DEVELOPER, Gitlab::Access::MAINTAINER] }
+
+ subject(:members_with_access_levels) { project.team.members_with_access_levels(access_levels) }
+
+ before do
+ project.team.add_developer(developer)
+ project.team.add_maintainer(maintainer)
+ project.team.add_guest(guest)
+ end
+
+ context 'with access_levels' do
+ it 'filters members who have given access levels' do
+ expect(members_with_access_levels).to contain_exactly(developer, maintainer)
+ end
+ end
+
+ context 'without access_levels' do
+ let_it_be(:access_levels) { [] }
+
+ it 'returns empty array' do
+ expect(members_with_access_levels).to be_empty
+ end
+ end
+ end
+
describe '#add_users' do
let(:user1) { create(:user) }
let(:user2) { create(:user) }
@@ -307,7 +337,7 @@ RSpec.describe ProjectTeam do
it { expect(project.team.max_member_access(nonmember.id)).to eq(Gitlab::Access::NO_ACCESS) }
it { expect(project.team.max_member_access(requester.id)).to eq(Gitlab::Access::NO_ACCESS) }
- context 'but share_with_group_lock is true' do
+ context 'but share_with_group_lock is true', :sidekiq_inline do
before do
project.namespace.update!(share_with_group_lock: true)
end
diff --git a/spec/models/projects/ci_feature_usage_spec.rb b/spec/models/projects/ci_feature_usage_spec.rb
new file mode 100644
index 00000000000..674faa6e7d1
--- /dev/null
+++ b/spec/models/projects/ci_feature_usage_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::CiFeatureUsage, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:feature) }
+ end
+
+ describe '.insert_usage' do
+ let_it_be(:project) { create(:project) }
+
+ context 'when data is not a duplicate' do
+ it 'creates a new record' do
+ expect { described_class.insert_usage(project_id: project.id, default_branch: false, feature: :code_coverage) }
+ .to change { described_class.count }
+
+ expect(described_class.first).to have_attributes(
+ project_id: project.id,
+ default_branch: false,
+ feature: 'code_coverage'
+ )
+ end
+ end
+
+ context 'when data is a duplicate' do
+ before do
+ create(:project_ci_feature_usage, project: project, default_branch: false, feature: :code_coverage)
+ end
+
+ it 'does not create a new record' do
+ expect { described_class.insert_usage(project_id: project.id, default_branch: false, feature: :code_coverage) }
+ .not_to change { described_class.count }
+ end
+ end
+ end
+end
diff --git a/spec/models/release_highlight_spec.rb b/spec/models/release_highlight_spec.rb
index a5441e2f47b..14a43df4229 100644
--- a/spec/models/release_highlight_spec.rb
+++ b/spec/models/release_highlight_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
- let(:fixture_dir_glob) { Dir.glob(File.join('spec', 'fixtures', 'whats_new', '*.yml')).grep(/\d*\_(\d*\_\d*)\.yml$/) }
+ let(:fixture_dir_glob) { Dir.glob(File.join(Rails.root, 'spec', 'fixtures', 'whats_new', '*.yml')).grep(/\d*\_(\d*\_\d*)\.yml$/) }
before do
allow(Dir).to receive(:glob).with(Rails.root.join('data', 'whats_new', '*.yml')).and_return(fixture_dir_glob)
@@ -193,4 +193,12 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
expect(subject).to eq('Free')
end
end
+
+ describe '.file_paths' do
+ it 'joins relative file paths with the root path to avoid caching the root url' do
+ allow(described_class).to receive(:relative_file_paths).and_return([+'/a.yml'])
+
+ expect(described_class.file_paths.first).to eq("#{Rails.root}/a.yml")
+ end
+ end
end
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index a64b01967ef..382359ccb17 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -93,51 +93,14 @@ RSpec.describe RemoteMirror, :mailer do
expect(mirror.credentials).to eq({ user: 'foo', password: 'bar' })
end
- it 'updates the remote config if credentials changed' do
+ it 'does not update the repository config if credentials changed' do
mirror = create_mirror(url: 'http://foo:bar@test.com')
repo = mirror.project.repository
+ old_config = rugged_repo(repo).config
mirror.update_attribute(:url, 'http://foo:baz@test.com')
- config = rugged_repo(repo).config
- expect(config["remote.#{mirror.remote_name}.url"]).to eq('http://foo:baz@test.com')
- end
-
- it 'removes previous remote' do
- mirror = create_mirror(url: 'http://foo:bar@test.com')
-
- expect(RepositoryRemoveRemoteWorker).to receive(:perform_async).with(mirror.project.id, mirror.remote_name).and_call_original
-
- mirror.update(url: 'http://test.com')
- end
- end
- end
-
- describe '#remote_name' do
- context 'when remote name is persisted in the database' do
- it 'returns remote name with random value' do
- allow(SecureRandom).to receive(:hex).and_return('secret')
-
- remote_mirror = create(:remote_mirror)
-
- expect(remote_mirror.remote_name).to eq('remote_mirror_secret')
- end
- end
-
- context 'when remote name is not persisted in the database' do
- it 'returns remote name with remote mirror id' do
- remote_mirror = create(:remote_mirror)
- remote_mirror.remote_name = nil
-
- expect(remote_mirror.remote_name).to eq("remote_mirror_#{remote_mirror.id}")
- end
- end
-
- context 'when remote is not persisted in the database' do
- it 'returns nil' do
- remote_mirror = build(:remote_mirror, remote_name: nil)
-
- expect(remote_mirror.remote_name).to be_nil
+ expect(rugged_repo(repo).config.to_hash).to eq(old_config.to_hash)
end
end
end
@@ -157,34 +120,19 @@ RSpec.describe RemoteMirror, :mailer do
end
describe '#update_repository' do
- shared_examples 'an update' do
- it 'performs update including options' do
- git_remote_mirror = stub_const('Gitlab::Git::RemoteMirror', spy)
- mirror = build(:remote_mirror)
-
- expect(mirror).to receive(:options_for_update).and_return(keep_divergent_refs: true)
- mirror.update_repository(inmemory_remote: inmemory)
-
- expect(git_remote_mirror).to have_received(:new).with(
- mirror.project.repository.raw,
- mirror.remote_name,
- inmemory ? mirror.url : nil,
- keep_divergent_refs: true
- )
- expect(git_remote_mirror).to have_received(:update)
- end
- end
+ it 'performs update including options' do
+ git_remote_mirror = stub_const('Gitlab::Git::RemoteMirror', spy)
+ mirror = build(:remote_mirror)
- context 'with inmemory remote' do
- let(:inmemory) { true }
+ expect(mirror).to receive(:options_for_update).and_return(keep_divergent_refs: true)
+ mirror.update_repository
- it_behaves_like 'an update'
- end
-
- context 'with on-disk remote' do
- let(:inmemory) { false }
-
- it_behaves_like 'an update'
+ expect(git_remote_mirror).to have_received(:new).with(
+ mirror.project.repository.raw,
+ mirror.url,
+ keep_divergent_refs: true
+ )
+ expect(git_remote_mirror).to have_received(:update)
end
end
@@ -303,10 +251,10 @@ RSpec.describe RemoteMirror, :mailer do
end
context 'when remote mirror gets destroyed' do
- it 'removes remote' do
+ it 'does not remove the remote' do
mirror = create_mirror(url: 'http://foo:bar@test.com')
- expect(RepositoryRemoveRemoteWorker).to receive(:perform_async).with(mirror.project.id, mirror.remote_name).and_call_original
+ expect(RepositoryRemoveRemoteWorker).not_to receive(:perform_async)
mirror.destroy!
end
@@ -402,30 +350,6 @@ RSpec.describe RemoteMirror, :mailer do
end
end
- describe '#ensure_remote!' do
- let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
- let(:project) { remote_mirror.project }
- let(:repository) { project.repository }
-
- it 'adds a remote multiple times with no errors' do
- expect(repository).to receive(:add_remote).with(remote_mirror.remote_name, remote_mirror.url).twice.and_call_original
-
- 2.times do
- remote_mirror.ensure_remote!
- end
- end
-
- context 'SSH public-key authentication' do
- it 'omits the password from the URL' do
- remote_mirror.update!(auth_method: 'ssh_public_key', url: 'ssh://git:pass@example.com')
-
- expect(repository).to receive(:add_remote).with(remote_mirror.remote_name, 'ssh://git@example.com')
-
- remote_mirror.ensure_remote!
- end
- end
- end
-
describe '#url=' do
let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 452eafe733f..211e448b6cf 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -398,32 +398,47 @@ RSpec.describe Repository do
end
describe '#new_commits' do
- let_it_be(:project) { create(:project, :repository) }
+ shared_examples '#new_commits' do
+ let_it_be(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
+ let(:repository) { project.repository }
- subject { repository.new_commits(rev) }
+ subject { repository.new_commits(rev, allow_quarantine: allow_quarantine) }
- context 'when there are no new commits' do
- let(:rev) { repository.commit.id }
+ context 'when there are no new commits' do
+ let(:rev) { repository.commit.id }
- it 'returns an empty array' do
- expect(subject).to eq([])
+ it 'returns an empty array' do
+ expect(subject).to eq([])
+ end
end
- end
- context 'when new commits are found' do
- let(:branch) { 'orphaned-branch' }
- let!(:rev) { repository.commit(branch).id }
+ context 'when new commits are found' do
+ let(:branch) { 'orphaned-branch' }
+ let!(:rev) { repository.commit(branch).id }
+ let(:allow_quarantine) { false }
- it 'returns the commits' do
- repository.delete_branch(branch)
+ it 'returns the commits' do
+ repository.delete_branch(branch)
- expect(subject).not_to be_empty
- expect(subject).to all( be_a(::Commit) )
- expect(subject.size).to eq(1)
+ expect(subject).not_to be_empty
+ expect(subject).to all( be_a(::Commit) )
+ expect(subject.size).to eq(1)
+ end
end
end
+
+ context 'with quarantine' do
+ let(:allow_quarantine) { true }
+
+ it_behaves_like '#new_commits'
+ end
+
+ context 'without quarantine' do
+ let(:allow_quarantine) { false }
+
+ it_behaves_like '#new_commits'
+ end
end
describe '#commits_by' do
@@ -1094,99 +1109,16 @@ RSpec.describe Repository do
end
end
- describe '#async_remove_remote' do
- before do
- masterrev = repository.find_branch('master').dereferenced_target
- create_remote_branch('joe', 'remote_branch', masterrev)
- end
-
- context 'when worker is scheduled successfully' do
- before do
- masterrev = repository.find_branch('master').dereferenced_target
- create_remote_branch('remote_name', 'remote_branch', masterrev)
-
- allow(RepositoryRemoveRemoteWorker).to receive(:perform_async).and_return('1234')
- end
-
- it 'returns job_id' do
- expect(repository.async_remove_remote('joe')).to eq('1234')
- end
- end
-
- context 'when worker does not schedule successfully' do
- before do
- allow(RepositoryRemoveRemoteWorker).to receive(:perform_async).and_return(nil)
- end
-
- it 'returns nil' do
- expect(Gitlab::AppLogger).to receive(:info).with("Remove remote job failed to create for #{project.id} with remote name joe.")
-
- expect(repository.async_remove_remote('joe')).to be_nil
- end
- end
- end
-
describe '#fetch_as_mirror' do
let(:url) { "http://example.com" }
- context 'when :fetch_remote_params is enabled' do
- let(:remote_name) { "remote-name" }
-
- before do
- stub_feature_flags(fetch_remote_params: true)
- end
-
- it 'fetches the URL without creating a remote' do
- expect(repository).not_to receive(:add_remote)
- expect(repository)
- .to receive(:fetch_remote)
- .with(remote_name, url: url, forced: false, prune: true, refmap: :all_refs)
- .and_return(nil)
-
- repository.fetch_as_mirror(url, remote_name: remote_name)
- end
- end
-
- context 'when :fetch_remote_params is disabled' do
- before do
- stub_feature_flags(fetch_remote_params: false)
- end
-
- shared_examples 'a fetch' do
- it 'adds and fetches a remote' do
- expect(repository)
- .to receive(:add_remote)
- .with(expected_remote, url, mirror_refmap: :all_refs)
- .and_return(nil)
- expect(repository)
- .to receive(:fetch_remote)
- .with(expected_remote, forced: false, prune: true)
- .and_return(nil)
-
- repository.fetch_as_mirror(url, remote_name: remote_name)
- end
- end
-
- context 'with temporary remote' do
- let(:remote_name) { nil }
- let(:expected_remote_suffix) { "123456" }
- let(:expected_remote) { "tmp-#{expected_remote_suffix}" }
-
- before do
- expect(repository)
- .to receive(:async_remove_remote).with(expected_remote).and_return(nil)
- allow(SecureRandom).to receive(:hex).and_return(expected_remote_suffix)
- end
-
- it_behaves_like 'a fetch'
- end
-
- context 'with remote name' do
- let(:remote_name) { "foo" }
- let(:expected_remote) { "foo" }
+ it 'fetches the URL without creating a remote' do
+ expect(repository)
+ .to receive(:fetch_remote)
+ .with(url, forced: false, prune: true, refmap: :all_refs, http_authorization_header: "")
+ .and_return(nil)
- it_behaves_like 'a fetch'
- end
+ repository.fetch_as_mirror(url)
end
end
@@ -2605,24 +2537,46 @@ RSpec.describe Repository do
end
shared_examples '#tree' do
+ subject { repository.tree(sha, path, recursive: recursive, pagination_params: pagination_params) }
+
+ let(:sha) { :head }
+ let(:path) { nil }
+ let(:recursive) { false }
+ let(:pagination_params) { nil }
+
context 'using a non-existing repository' do
before do
allow(repository).to receive(:head_commit).and_return(nil)
end
- it 'returns nil' do
- expect(repository.tree(:head)).to be_nil
- end
+ it { is_expected.to be_nil }
+
+ context 'when path is defined' do
+ let(:path) { 'README.md' }
- it 'returns nil when using a path' do
- expect(repository.tree(:head, 'README.md')).to be_nil
+ it { is_expected.to be_nil }
end
end
context 'using an existing repository' do
- it 'returns a Tree' do
- expect(repository.tree(:head)).to be_an_instance_of(Tree)
- expect(repository.tree('v1.1.1')).to be_an_instance_of(Tree)
+ it { is_expected.to be_an_instance_of(Tree) }
+
+ context 'when different sha is set' do
+ let(:sha) { 'v1.1.1' }
+
+ it { is_expected.to be_an_instance_of(Tree) }
+ end
+
+ context 'when recursive is true' do
+ let(:recursive) { true }
+
+ it { is_expected.to be_an_instance_of(Tree) }
+ end
+
+ context 'with pagination parameters' do
+ let(:pagination_params) { { limit: 10, page_token: nil } }
+
+ it { is_expected.to be_an_instance_of(Tree) }
end
end
end
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index 19d3895177f..4e20a83f18e 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -431,7 +431,7 @@ RSpec.describe Snippet do
subject do
snippet.assign_attributes(title: title)
- snippet.check_for_spam?
+ snippet.check_for_spam?(user: snippet.author)
end
context 'when public and spammable attributes changed' do
@@ -455,7 +455,7 @@ RSpec.describe Snippet do
snippet.save!
snippet.visibility_level = Snippet::PUBLIC
- expect(snippet.check_for_spam?).to be_truthy
+ expect(snippet.check_for_spam?(user: snippet.author)).to be_truthy
end
end
diff --git a/spec/models/timelog_spec.rb b/spec/models/timelog_spec.rb
index 9d6fda1d2a9..d6c31307e30 100644
--- a/spec/models/timelog_spec.rb
+++ b/spec/models/timelog_spec.rb
@@ -70,8 +70,9 @@ RSpec.describe Timelog do
let_it_be(:medium_time_ago) { 15.days.ago }
let_it_be(:long_time_ago) { 65.days.ago }
- let_it_be(:timelog) { create(:issue_timelog, spent_at: long_time_ago) }
- let_it_be(:timelog1) { create(:issue_timelog, spent_at: medium_time_ago, issue: group_issue) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:timelog) { create(:issue_timelog, spent_at: long_time_ago, user: user) }
+ let_it_be(:timelog1) { create(:issue_timelog, spent_at: medium_time_ago, issue: group_issue, user: user) }
let_it_be(:timelog2) { create(:issue_timelog, spent_at: short_time_ago, issue: subgroup_issue) }
let_it_be(:timelog3) { create(:merge_request_timelog, spent_at: long_time_ago) }
let_it_be(:timelog4) { create(:merge_request_timelog, spent_at: medium_time_ago, merge_request: group_merge_request) }
@@ -83,6 +84,25 @@ RSpec.describe Timelog do
end
end
+ describe '.for_user' do
+ it 'returns timelogs created by user' do
+ expect(described_class.for_user(user)).to contain_exactly(timelog, timelog1)
+ end
+ end
+
+ describe '.in_project' do
+ it 'returns timelogs created for project issues and merge requests' do
+ project = create(:project, :empty_repo)
+
+ create(:issue_timelog)
+ create(:merge_request_timelog)
+ timelog1 = create(:issue_timelog, issue: create(:issue, project: project))
+ timelog2 = create(:merge_request_timelog, merge_request: create(:merge_request, source_project: project))
+
+ expect(described_class.in_project(project.id)).to contain_exactly(timelog1, timelog2)
+ end
+ end
+
describe '.at_or_after' do
it 'returns timelogs at the time limit' do
timelogs = described_class.at_or_after(short_time_ago)
diff --git a/spec/models/tree_spec.rb b/spec/models/tree_spec.rb
index 1522d836f76..b7a8276ec55 100644
--- a/spec/models/tree_spec.rb
+++ b/spec/models/tree_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Tree do
let(:repository) { create(:project, :repository).repository }
let(:sha) { repository.root_ref }
- subject { described_class.new(repository, '54fcc214') }
+ subject(:tree) { described_class.new(repository, '54fcc214') }
describe '#readme' do
before do
@@ -66,4 +66,10 @@ RSpec.describe Tree do
expect(subject.readme.name).to eq 'README.md'
end
end
+
+ describe '#cursor' do
+ subject { tree.cursor }
+
+ it { is_expected.to be_an_instance_of(Gitaly::PaginationCursor) }
+ end
end
diff --git a/spec/models/user_detail_spec.rb b/spec/models/user_detail_spec.rb
index c2d9b916a1c..3c87dcdcbd9 100644
--- a/spec/models/user_detail_spec.rb
+++ b/spec/models/user_detail_spec.rb
@@ -16,6 +16,11 @@ RSpec.describe UserDetail do
it { is_expected.to validate_length_of(:pronouns).is_at_most(50) }
end
+ describe '#pronunciation' do
+ it { is_expected.not_to validate_presence_of(:pronunciation) }
+ it { is_expected.to validate_length_of(:pronunciation).is_at_most(255) }
+ end
+
describe '#bio' do
it { is_expected.to validate_length_of(:bio).is_at_most(255) }
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 0eb769c65cd..d73bc95a2f2 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -77,6 +77,9 @@ RSpec.describe User do
it { is_expected.to delegate_method(:pronouns).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:pronouns=).to(:user_detail).with_arguments(:args).allow_nil }
+ it { is_expected.to delegate_method(:pronunciation).to(:user_detail).allow_nil }
+ it { is_expected.to delegate_method(:pronunciation=).to(:user_detail).with_arguments(:args).allow_nil }
+
it { is_expected.to delegate_method(:bio).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:bio=).to(:user_detail).with_arguments(:args).allow_nil }
it { is_expected.to delegate_method(:bio_html).to(:user_detail).allow_nil }
@@ -89,6 +92,7 @@ RSpec.describe User do
it { is_expected.to have_one(:atlassian_identity) }
it { is_expected.to have_one(:user_highest_role) }
it { is_expected.to have_one(:credit_card_validation) }
+ it { is_expected.to have_one(:banned_user) }
it { is_expected.to have_many(:snippets).dependent(:destroy) }
it { is_expected.to have_many(:members) }
it { is_expected.to have_many(:project_members) }
@@ -120,6 +124,7 @@ RSpec.describe User do
it { is_expected.to have_many(:merge_request_reviewers).inverse_of(:reviewer) }
it { is_expected.to have_many(:created_custom_emoji).inverse_of(:creator) }
it { is_expected.to have_many(:in_product_marketing_emails) }
+ it { is_expected.to have_many(:timelogs) }
describe "#user_detail" do
it 'does not persist `user_detail` by default' do
@@ -145,6 +150,12 @@ RSpec.describe User do
expect(user.pronouns).to eq(user.user_detail.pronouns)
end
+ it 'delegates `pronunciation` to `user_detail`' do
+ user = create(:user, name: 'Example', pronunciation: 'uhg-zaam-pl')
+
+ expect(user.pronunciation).to eq(user.user_detail.pronunciation)
+ end
+
it 'creates `user_detail` when `bio` is first updated' do
user = create(:user)
@@ -485,7 +496,7 @@ RSpec.describe User do
describe 'email' do
context 'when no signup domains allowed' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:domain_allowlist).and_return([])
+ stub_application_setting(domain_allowlist: [])
end
it 'accepts any email' do
@@ -496,7 +507,7 @@ RSpec.describe User do
context 'bad regex' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:domain_allowlist).and_return(['([a-zA-Z0-9]+)+\.com'])
+ stub_application_setting(domain_allowlist: ['([a-zA-Z0-9]+)+\.com'])
end
it 'does not hang on evil input' do
@@ -510,7 +521,7 @@ RSpec.describe User do
context 'when a signup domain is allowed and subdomains are allowed' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:domain_allowlist).and_return(['example.com', '*.example.com'])
+ stub_application_setting(domain_allowlist: ['example.com', '*.example.com'])
end
it 'accepts info@example.com' do
@@ -526,12 +537,13 @@ RSpec.describe User do
it 'rejects example@test.com' do
user = build(:user, email: "example@test.com")
expect(user).to be_invalid
+ expect(user.errors.messages[:email].first).to eq(_('domain is not authorized for sign-up.'))
end
end
context 'when a signup domain is allowed and subdomains are not allowed' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:domain_allowlist).and_return(['example.com'])
+ stub_application_setting(domain_allowlist: ['example.com'])
end
it 'accepts info@example.com' do
@@ -542,11 +554,13 @@ RSpec.describe User do
it 'rejects info@test.example.com' do
user = build(:user, email: "info@test.example.com")
expect(user).to be_invalid
+ expect(user.errors.messages[:email].first).to eq(_('domain is not authorized for sign-up.'))
end
it 'rejects example@test.com' do
user = build(:user, email: "example@test.com")
expect(user).to be_invalid
+ expect(user.errors.messages[:email].first).to eq(_('domain is not authorized for sign-up.'))
end
it 'accepts example@test.com when added by another user' do
@@ -557,13 +571,13 @@ RSpec.describe User do
context 'domain denylist' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:domain_denylist_enabled?).and_return(true)
- allow_any_instance_of(ApplicationSetting).to receive(:domain_denylist).and_return(['example.com'])
+ stub_application_setting(domain_denylist_enabled: true)
+ stub_application_setting(domain_denylist: ['example.com'])
end
context 'bad regex' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:domain_denylist).and_return(['([a-zA-Z0-9]+)+\.com'])
+ stub_application_setting(domain_denylist: ['([a-zA-Z0-9]+)+\.com'])
end
it 'does not hang on evil input' do
@@ -584,6 +598,7 @@ RSpec.describe User do
it 'rejects info@example.com' do
user = build(:user, email: 'info@example.com')
expect(user).not_to be_valid
+ expect(user.errors.messages[:email].first).to eq(_('is not from an allowed domain.'))
end
it 'accepts info@example.com when added by another user' do
@@ -594,8 +609,8 @@ RSpec.describe User do
context 'when a signup domain is denied but a wildcard subdomain is allowed' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:domain_denylist).and_return(['test.example.com'])
- allow_any_instance_of(ApplicationSetting).to receive(:domain_allowlist).and_return(['*.example.com'])
+ stub_application_setting(domain_denylist: ['test.example.com'])
+ stub_application_setting(domain_allowlist: ['*.example.com'])
end
it 'gives priority to allowlist and allow info@test.example.com' do
@@ -606,7 +621,7 @@ RSpec.describe User do
context 'with both lists containing a domain' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:domain_allowlist).and_return(['test.com'])
+ stub_application_setting(domain_allowlist: ['test.com'])
end
it 'accepts info@test.com' do
@@ -617,6 +632,7 @@ RSpec.describe User do
it 'rejects info@example.com' do
user = build(:user, email: 'info@example.com')
expect(user).not_to be_valid
+ expect(user.errors.messages[:email].first).to eq(_('domain is not authorized for sign-up.'))
end
end
end
@@ -688,7 +704,7 @@ RSpec.describe User do
user.notification_email = email.email
expect(user).to be_invalid
- expect(user.errors[:notification_email]).to include('is not an email you own')
+ expect(user.errors[:notification_email]).to include(_('must be an email you have verified'))
end
end
@@ -707,7 +723,7 @@ RSpec.describe User do
user.public_email = email.email
expect(user).to be_invalid
- expect(user.errors[:public_email]).to include('is not an email you own')
+ expect(user.errors[:public_email]).to include(_('must be an email you have verified'))
end
end
@@ -1798,6 +1814,15 @@ RSpec.describe User do
it { expect(user.namespaces).to contain_exactly(user.namespace, group) }
it { expect(user.manageable_namespaces).to contain_exactly(user.namespace, group) }
+ context 'with owned groups only' do
+ before do
+ other_group = create(:group)
+ other_group.add_developer(user)
+ end
+
+ it { expect(user.namespaces(owned_only: true)).to contain_exactly(user.namespace, group) }
+ end
+
context 'with child groups' do
let!(:subgroup) { create(:group, parent: group) }
@@ -1950,6 +1975,42 @@ RSpec.describe User do
end
end
+ describe 'banning and unbanning a user', :aggregate_failures do
+ let(:user) { create(:user) }
+
+ context 'banning a user' do
+ it 'bans and blocks the user' do
+ user.ban
+
+ expect(user.banned?).to eq(true)
+ expect(user.blocked?).to eq(true)
+ end
+
+ it 'creates a BannedUser record' do
+ expect { user.ban }.to change { Users::BannedUser.count }.by(1)
+ expect(Users::BannedUser.last.user_id).to eq(user.id)
+ end
+ end
+
+ context 'unbanning a user' do
+ before do
+ user.ban!
+ end
+
+ it 'activates the user' do
+ user.activate
+
+ expect(user.banned?).to eq(false)
+ expect(user.active?).to eq(true)
+ end
+
+ it 'deletes the BannedUser record' do
+ expect { user.activate }.to change { Users::BannedUser.count }.by(-1)
+ expect(Users::BannedUser.where(user_id: user.id)).not_to exist
+ end
+ end
+ end
+
describe '.filter_items' do
let(:user) { double }
@@ -3064,6 +3125,19 @@ RSpec.describe User do
end
end
+ describe '#notification_email' do
+ let(:email) { 'gonzo@muppets.com' }
+
+ context 'when the column in the database is null' do
+ subject { create(:user, email: email, notification_email: nil) }
+
+ it 'defaults to the primary email' do
+ expect(subject.read_attribute(:notification_email)).to be nil
+ expect(subject.notification_email).to eq(email)
+ end
+ end
+ end
+
describe '.find_by_private_commit_email' do
context 'with email' do
let_it_be(:user) { create(:user) }
@@ -3993,6 +4067,14 @@ RSpec.describe User do
]
end
end
+
+ context 'when the user is not saved' do
+ let(:user) { build(:user) }
+
+ it 'returns empty when there are no groups or ancestor groups for the user' do
+ is_expected.to eq([])
+ end
+ end
end
describe '#refresh_authorized_projects', :clean_gitlab_redis_shared_state do
@@ -4254,6 +4336,14 @@ RSpec.describe User do
expect(user.two_factor_grace_period).to be 48
end
end
+
+ context 'when the user is not saved' do
+ let(:user) { build(:user) }
+
+ it 'does not raise an ActiveRecord::StatementInvalid statement exception' do
+ expect { user.update_two_factor_requirement }.not_to raise_error
+ end
+ end
end
describe '#source_groups_of_two_factor_authentication_requirement' do
diff --git a/spec/models/users/banned_user_spec.rb b/spec/models/users/banned_user_spec.rb
new file mode 100644
index 00000000000..b55c4821d05
--- /dev/null
+++ b/spec/models/users/banned_user_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::BannedUser do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:user) }
+ end
+
+ describe 'validations' do
+ before do
+ create(:user, :banned)
+ end
+
+ it { is_expected.to validate_presence_of(:user) }
+
+ it 'validates uniqueness of banned user id' do
+ is_expected.to validate_uniqueness_of(:user_id).with_message("banned user already exists")
+ end
+ end
+end
diff --git a/spec/models/users/in_product_marketing_email_spec.rb b/spec/models/users/in_product_marketing_email_spec.rb
index 772d875d69e..a9ddd86677c 100644
--- a/spec/models/users/in_product_marketing_email_spec.rb
+++ b/spec/models/users/in_product_marketing_email_spec.rb
@@ -19,6 +19,12 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
it { is_expected.to validate_uniqueness_of(:user_id).scoped_to([:track, :series]).with_message('has already been sent') }
end
+ describe '.tracks' do
+ it 'has an entry for every track' do
+ expect(Namespaces::InProductMarketingEmailsService::TRACKS.keys).to match_array(described_class.tracks.keys.map(&:to_sym))
+ end
+ end
+
describe '.without_track_and_series' do
let_it_be(:user) { create(:user) }
diff --git a/spec/models/work_item/type_spec.rb b/spec/models/work_item/type_spec.rb
new file mode 100644
index 00000000000..90f551b7d63
--- /dev/null
+++ b/spec/models/work_item/type_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItem::Type do
+ describe 'modules' do
+ it { is_expected.to include_module(CacheMarkdownField) }
+ end
+
+ describe 'associations' do
+ it { is_expected.to have_many(:work_items).with_foreign_key('work_item_type_id') }
+ it { is_expected.to belong_to(:namespace) }
+ end
+
+ describe '#destroy' do
+ let!(:work_item) { create :issue }
+
+ context 'when there are no work items of that type' do
+ it 'deletes type but not unrelated issues' do
+ type = create(:work_item_type)
+
+ expect { type.destroy! }.not_to change(Issue, :count)
+ expect(WorkItem::Type.count).to eq 0
+ end
+ end
+
+ it 'does not delete type when there are related issues' do
+ type = create(:work_item_type, work_items: [work_item])
+
+ expect { type.destroy! }.to raise_error(ActiveRecord::InvalidForeignKey)
+ expect(Issue.count).to eq 1
+ end
+ end
+
+ describe 'validation' do
+ describe 'name uniqueness' do
+ subject { create(:work_item_type) }
+
+ it { is_expected.to validate_uniqueness_of(:name).case_insensitive.scoped_to([:namespace_id]) }
+ end
+
+ it { is_expected.not_to allow_value('s' * 256).for(:icon_name) }
+ end
+
+ describe '#name' do
+ it 'strips name' do
+ work_item_type = described_class.new(name: ' label😸 ')
+ work_item_type.valid?
+
+ expect(work_item_type.name).to eq('label😸')
+ end
+ end
+end
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 8ff936d5a35..d62271eedf6 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -360,6 +360,21 @@ RSpec.describe IssuePolicy do
expect(permissions(assignee, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata)
end
end
+
+ context 'with a hidden issue' do
+ let(:user) { create(:user) }
+ let(:banned_user) { create(:user, :banned) }
+ let(:admin) { create(:user, :admin) }
+ let(:hidden_issue) { create(:issue, project: project, author: banned_user) }
+
+ it 'does not allow non-admin user to read the issue' do
+ expect(permissions(user, hidden_issue)).not_to be_allowed(:read_issue)
+ end
+
+ it 'allows admin to read the issue', :enable_admin_mode do
+ expect(permissions(admin, hidden_issue)).to be_allowed(:read_issue)
+ end
+ end
end
context 'with external authorization enabled' do
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 051a4420e73..f36b0a62aa3 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -840,6 +840,8 @@ RSpec.describe ProjectPolicy do
it { is_expected.to be_allowed(:read_package) }
it { is_expected.to be_allowed(:read_project) }
it { is_expected.to be_disallowed(:create_package) }
+
+ it_behaves_like 'package access with repository disabled'
end
context 'a deploy token with write_package_registry scope' do
@@ -849,6 +851,8 @@ RSpec.describe ProjectPolicy do
it { is_expected.to be_allowed(:read_package) }
it { is_expected.to be_allowed(:read_project) }
it { is_expected.to be_disallowed(:destroy_package) }
+
+ it_behaves_like 'package access with repository disabled'
end
end
@@ -1021,18 +1025,7 @@ RSpec.describe ProjectPolicy do
it { is_expected.to be_allowed(:read_package) }
- context 'when repository is disabled' do
- before do
- project.project_feature.update!(
- # Disable merge_requests and builds as well, since merge_requests and
- # builds cannot have higher visibility than repository.
- merge_requests_access_level: ProjectFeature::DISABLED,
- builds_access_level: ProjectFeature::DISABLED,
- repository_access_level: ProjectFeature::DISABLED)
- end
-
- it { is_expected.to be_disallowed(:read_package) }
- end
+ it_behaves_like 'package access with repository disabled'
end
context 'with owner' do
@@ -1460,66 +1453,65 @@ RSpec.describe ProjectPolicy do
end
describe 'when user is authenticated via CI_JOB_TOKEN', :request_store do
- let(:current_user) { developer }
- let(:job) { build_stubbed(:ci_build, project: scope_project, user: current_user) }
+ using RSpec::Parameterized::TableSyntax
- before do
- current_user.set_ci_job_token_scope!(job)
- scope_project.update!(ci_job_token_scope_enabled: true)
+ where(:project_visibility, :user_role, :external_user, :scope_project_type, :token_scope_enabled, :result) do
+ :private | :reporter | false | :same | true | true
+ :private | :reporter | false | :same | false | true
+ :private | :reporter | false | :different | true | false
+ :private | :reporter | false | :different | false | true
+ :private | :guest | false | :same | true | true
+ :private | :guest | false | :same | false | true
+ :private | :guest | false | :different | true | false
+ :private | :guest | false | :different | false | true
+
+ :internal | :reporter | false | :same | true | true
+ :internal | :reporter | true | :same | true | true
+ :internal | :reporter | false | :same | false | true
+ :internal | :reporter | false | :different | true | true
+ :internal | :reporter | true | :different | true | false
+ :internal | :reporter | false | :different | false | true
+ :internal | :guest | false | :same | true | true
+ :internal | :guest | true | :same | true | true
+ :internal | :guest | false | :same | false | true
+ :internal | :guest | false | :different | true | true
+ :internal | :guest | true | :different | true | false
+ :internal | :guest | false | :different | false | true
+
+ :public | :reporter | false | :same | true | true
+ :public | :reporter | false | :same | false | true
+ :public | :reporter | false | :different | true | true
+ :public | :reporter | false | :different | false | true
+ :public | :guest | false | :same | true | true
+ :public | :guest | false | :same | false | true
+ :public | :guest | false | :different | true | true
+ :public | :guest | false | :different | false | true
end
- context 'when accessing a private project' do
- let(:project) { private_project }
-
- context 'when the job token comes from the same project' do
- let(:scope_project) { project }
-
- it { is_expected.to be_allowed(:developer_access) }
- end
-
- context 'when the job token comes from another project' do
- let(:scope_project) { create(:project, :private) }
-
- before do
- scope_project.add_developer(current_user)
- end
-
- it { is_expected.to be_disallowed(:guest_access) }
-
- context 'when job token scope is disabled' do
- before do
- scope_project.update!(ci_job_token_scope_enabled: false)
- end
+ with_them do
+ let(:current_user) { public_send(user_role) }
+ let(:project) { public_send("#{project_visibility}_project") }
+ let(:job) { build_stubbed(:ci_build, project: scope_project, user: current_user) }
- it { is_expected.to be_allowed(:guest_access) }
+ let(:scope_project) do
+ if scope_project_type == :same
+ project
+ else
+ create(:project, :private)
end
end
- end
-
- context 'when accessing a public project' do
- let(:project) { public_project }
-
- context 'when the job token comes from the same project' do
- let(:scope_project) { project }
- it { is_expected.to be_allowed(:developer_access) }
+ before do
+ current_user.set_ci_job_token_scope!(job)
+ current_user.external = external_user
+ scope_project.update!(ci_job_token_scope_enabled: token_scope_enabled)
end
- context 'when the job token comes from another project' do
- let(:scope_project) { create(:project, :private) }
-
- before do
- scope_project.add_developer(current_user)
- end
-
- it { is_expected.to be_disallowed(:public_access) }
-
- context 'when job token scope is disabled' do
- before do
- scope_project.update!(ci_job_token_scope_enabled: false)
- end
-
- it { is_expected.to be_allowed(:public_access) }
+ it "enforces the expected permissions" do
+ if result
+ is_expected.to be_allowed("#{user_role}_access".to_sym)
+ else
+ is_expected.to be_disallowed("#{user_role}_access".to_sym)
end
end
end
diff --git a/spec/policies/release_policy_spec.rb b/spec/policies/release_policy_spec.rb
index 25468ae2ea2..5a34b1f4236 100644
--- a/spec/policies/release_policy_spec.rb
+++ b/spec/policies/release_policy_spec.rb
@@ -17,29 +17,6 @@ RSpec.describe ReleasePolicy, :request_store do
subject { described_class.new(user, release) }
- context 'when the evalute_protected_tag_for_release_permissions feature flag is disabled' do
- before do
- stub_feature_flags(evalute_protected_tag_for_release_permissions: false)
- end
-
- it 'allows the user to create and update a release' do
- is_expected.to be_allowed(:create_release)
- is_expected.to be_allowed(:update_release)
- end
-
- it 'prevents the user from destroying a release' do
- is_expected.to be_disallowed(:destroy_release)
- end
-
- context 'when the user is maintainer' do
- let(:user) { maintainer }
-
- it 'allows the user to destroy a release' do
- is_expected.to be_allowed(:destroy_release)
- end
- end
- end
-
context 'when the user has access to the protected tag' do
let_it_be(:protected_tag) { create(:protected_tag, :developers_can_create, name: release.tag, project: project) }
diff --git a/spec/presenters/group_clusterable_presenter_spec.rb b/spec/presenters/group_clusterable_presenter_spec.rb
index 27360201e81..84b97ba0bb7 100644
--- a/spec/presenters/group_clusterable_presenter_spec.rb
+++ b/spec/presenters/group_clusterable_presenter_spec.rb
@@ -67,22 +67,6 @@ RSpec.describe GroupClusterablePresenter do
it { is_expected.to eq(cluster_status_group_cluster_path(group, cluster)) }
end
- describe '#install_applications_cluster_path' do
- let(:application) { :helm }
-
- subject { presenter.install_applications_cluster_path(cluster, application) }
-
- it { is_expected.to eq(install_applications_group_cluster_path(group, cluster, application)) }
- end
-
- describe '#update_applications_cluster_path' do
- let(:application) { :helm }
-
- subject { presenter.update_applications_cluster_path(cluster, application) }
-
- it { is_expected.to eq(update_applications_group_cluster_path(group, cluster, application)) }
- end
-
describe '#clear_cluster_cache_path' do
subject { presenter.clear_cluster_cache_path(cluster) }
diff --git a/spec/presenters/project_clusterable_presenter_spec.rb b/spec/presenters/project_clusterable_presenter_spec.rb
index b518c63f0ca..9057b518647 100644
--- a/spec/presenters/project_clusterable_presenter_spec.rb
+++ b/spec/presenters/project_clusterable_presenter_spec.rb
@@ -67,22 +67,6 @@ RSpec.describe ProjectClusterablePresenter do
it { is_expected.to eq(cluster_status_project_cluster_path(project, cluster)) }
end
- describe '#install_applications_cluster_path' do
- let(:application) { :helm }
-
- subject { presenter.install_applications_cluster_path(cluster, application) }
-
- it { is_expected.to eq(install_applications_project_cluster_path(project, cluster, application)) }
- end
-
- describe '#update_applications_cluster_path' do
- let(:application) { :helm }
-
- subject { presenter.update_applications_cluster_path(cluster, application) }
-
- it { is_expected.to eq(update_applications_project_cluster_path(project, cluster, application)) }
- end
-
describe '#clear_cluster_cache_path' do
subject { presenter.clear_cluster_cache_path(cluster) }
diff --git a/spec/presenters/sentry_error_presenter_spec.rb b/spec/presenters/sentry_error_presenter_spec.rb
index 86e43be1fa7..ce1b31d2371 100644
--- a/spec/presenters/sentry_error_presenter_spec.rb
+++ b/spec/presenters/sentry_error_presenter_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe SentryErrorPresenter do
- let(:error) { build(:detailed_error_tracking_error) }
+ let(:error) { build(:error_tracking_sentry_detailed_error) }
let(:presenter) { described_class.new(error) }
describe '#frequency' do
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
index f0edfa6f227..1a28687c830 100644
--- a/spec/requests/api/bulk_imports_spec.rb
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -20,6 +20,48 @@ RSpec.describe API::BulkImports do
end
end
+ describe 'POST /bulk_imports' do
+ it 'starts a new migration' do
+ post api('/bulk_imports', user), params: {
+ configuration: {
+ url: 'http://gitlab.example',
+ access_token: 'access_token'
+ },
+ entities: [
+ source_type: 'group_entity',
+ source_full_path: 'full_path',
+ destination_name: 'destination_name',
+ destination_namespace: 'destination_namespace'
+ ]
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+
+ expect(json_response['status']).to eq('created')
+ end
+
+ context 'when provided url is blocked' do
+ it 'returns blocked url error' do
+ post api('/bulk_imports', user), params: {
+ configuration: {
+ url: 'url',
+ access_token: 'access_token'
+ },
+ entities: [
+ source_type: 'group_entity',
+ source_full_path: 'full_path',
+ destination_name: 'destination_name',
+ destination_namespace: 'destination_namespace'
+ ]
+ }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+
+ expect(json_response['message']).to eq('Validation failed: Url is blocked: Only allowed schemes are http, https')
+ end
+ end
+ end
+
describe 'GET /bulk_imports/entities' do
it 'returns a list of all import entities authored by the user' do
get api('/bulk_imports/entities', user)
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index cff006bed94..b6ab9310471 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Jobs do
+RSpec.describe API::Ci::Jobs do
include HttpBasicAuthHelpers
include DependencyProxyHelpers
@@ -114,7 +114,7 @@ RSpec.describe API::Jobs do
context 'with job token authentication header' do
include_context 'with auth headers' do
- let(:header) { { API::Helpers::Runner::JOB_TOKEN_HEADER => running_job.token } }
+ let(:header) { { API::Ci::Helpers::Runner::JOB_TOKEN_HEADER => running_job.token } }
end
it_behaves_like 'returns common job data' do
@@ -150,7 +150,7 @@ RSpec.describe API::Jobs do
context 'with non running job' do
include_context 'with auth headers' do
- let(:header) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token } }
+ let(:header) { { API::Ci::Helpers::Runner::JOB_TOKEN_HEADER => job.token } }
end
it_behaves_like 'returns unauthorized'
@@ -523,15 +523,13 @@ RSpec.describe API::Jobs do
context 'when artifacts are stored remotely' do
let(:proxy_download) { false }
+ let(:job) { create(:ci_build, pipeline: pipeline) }
+ let(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
stub_artifacts_object_storage(proxy_download: proxy_download)
- end
-
- let(:job) { create(:ci_build, pipeline: pipeline) }
- let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
- before do
+ artifact
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
@@ -708,11 +706,7 @@ RSpec.describe API::Jobs do
context 'with branch name containing slash' do
before do
pipeline.reload
- pipeline.update!(ref: 'improve/awesome',
- sha: project.commit('improve/awesome').sha)
- end
-
- before do
+ pipeline.update!(ref: 'improve/awesome', sha: project.commit('improve/awesome').sha)
get_for_ref('improve/awesome')
end
diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb
index eb6c0861844..640e1ee6422 100644
--- a/spec/requests/api/ci/pipelines_spec.rb
+++ b/spec/requests/api/ci/pipelines_spec.rb
@@ -34,7 +34,28 @@ RSpec.describe API::Ci::Pipelines do
expect(json_response.first['sha']).to match(/\A\h{40}\z/)
expect(json_response.first['id']).to eq pipeline.id
expect(json_response.first['web_url']).to be_present
- expect(json_response.first.keys).to contain_exactly(*%w[id project_id sha ref status web_url created_at updated_at])
+ end
+
+ describe 'keys in the response' do
+ context 'when `pipeline_source_filter` feature flag is disabled' do
+ before do
+ stub_feature_flags(pipeline_source_filter: false)
+ end
+
+ it 'does not include pipeline source' do
+ get api("/projects/#{project.id}/pipelines", user)
+
+ expect(json_response.first.keys).to contain_exactly(*%w[id project_id sha ref status web_url created_at updated_at])
+ end
+ end
+
+ context 'when `pipeline_source_filter` feature flag is enabled' do
+ it 'includes pipeline source' do
+ get api("/projects/#{project.id}/pipelines", user)
+
+ expect(json_response.first.keys).to contain_exactly(*%w[id project_id sha ref status web_url created_at updated_at source])
+ end
+ end
end
context 'when parameter is passed' do
@@ -294,6 +315,48 @@ RSpec.describe API::Ci::Pipelines do
end
end
end
+
+ context 'when a source is specified' do
+ before do
+ create(:ci_pipeline, project: project, source: :push)
+ create(:ci_pipeline, project: project, source: :web)
+ create(:ci_pipeline, project: project, source: :api)
+ end
+
+ context 'when `pipeline_source_filter` feature flag is disabled' do
+ before do
+ stub_feature_flags(pipeline_source_filter: false)
+ end
+
+ it 'returns all pipelines' do
+ get api("/projects/#{project.id}/pipelines", user), params: { source: 'web' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).not_to be_empty
+ expect(json_response.length).to be >= 3
+ end
+ end
+
+ context 'when `pipeline_source_filter` feature flag is enabled' do
+ it 'returns matched pipelines' do
+ get api("/projects/#{project.id}/pipelines", user), params: { source: 'web' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).not_to be_empty
+ json_response.each { |r| expect(r['source']).to eq('web') }
+ end
+
+ context 'when source is invalid' do
+ it 'returns bad_request' do
+ get api("/projects/#{project.id}/pipelines", user), params: { source: 'invalid-source' }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+ end
end
end
@@ -1150,4 +1213,43 @@ RSpec.describe API::Ci::Pipelines do
end
end
end
+
+ describe 'GET /projects/:id/pipelines/:pipeline_id/test_report_summary' do
+ subject { get api("/projects/#{project.id}/pipelines/#{pipeline.id}/test_report_summary", current_user) }
+
+ context 'authorized user' do
+ let(:current_user) { user }
+
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when pipeline does not have a test report summary' do
+ it 'returns an empty test report summary' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total']['count']).to eq(0)
+ end
+ end
+
+ context 'when pipeline has a test report summary' do
+ let(:pipeline) { create(:ci_pipeline, :with_report_results, project: project) }
+
+ it 'returns the test report summary' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total']['count']).to eq(2)
+ end
+ end
+ end
+
+ context 'unauthorized user' do
+ it 'does not return project pipelines' do
+ get api("/projects/#{project.id}/pipelines/#{pipeline.id}/test_report_summary", non_member)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq '404 Project Not Found'
+ end
+ end
+ end
end
diff --git a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
index 017a12a4a40..195aac2e5f0 100644
--- a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
let(:jwt) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt } }
- let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
+ let(:headers_with_token) { headers.merge(API::Ci::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') }
@@ -398,7 +398,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
context 'when using runners token' do
it 'responds with forbidden' do
- upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
+ upload_artifacts(file_upload, headers.merge(API::Ci::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
expect(response).to have_gitlab_http_status(:forbidden)
end
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 00c3a0a31af..adac81ff6f4 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -506,32 +506,12 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
describe 'preloading job_artifacts_archive' do
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(preload_associations_jobs_request_api_endpoint: false)
- end
-
- it 'queries the ci_job_artifacts table multiple times' do
- expect { request_job }.to exceed_all_query_limit(1).for_model(::Ci::JobArtifact)
- end
-
- it 'queries the ci_builds table more than three times' do
- expect { request_job }.to exceed_all_query_limit(3).for_model(::Ci::Build)
- end
+ it 'queries the ci_job_artifacts table once only' do
+ expect { request_job }.not_to exceed_all_query_limit(1).for_model(::Ci::JobArtifact)
end
- context 'when the feature flag is enabled' do
- before do
- stub_feature_flags(preload_associations_jobs_request_api_endpoint: true)
- end
-
- it 'queries the ci_job_artifacts table once only' do
- expect { request_job }.not_to exceed_all_query_limit(1).for_model(::Ci::JobArtifact)
- end
-
- it 'queries the ci_builds table five times' do
- expect { request_job }.not_to exceed_all_query_limit(5).for_model(::Ci::Build)
- end
+ it 'queries the ci_builds table five times' do
+ expect { request_job }.not_to exceed_all_query_limit(5).for_model(::Ci::Build)
end
end
end
diff --git a/spec/requests/api/ci/runner/jobs_trace_spec.rb b/spec/requests/api/ci/runner/jobs_trace_spec.rb
index e20c7e36096..2760e306693 100644
--- a/spec/requests/api/ci/runner/jobs_trace_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_trace_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_trace_chunks do
project: project, user: user, runner_id: runner.id, pipeline: pipeline)
end
- let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
+ let(:headers) { { API::Ci::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
let(:update_interval) { 10.seconds.to_i }
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
index 6d222046998..17b988a60c5 100644
--- a/spec/requests/api/ci/runner/runners_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -98,14 +98,33 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
before do
create(:ci_runner, runner_type: :project_type, projects: [project], contacted_at: 1.second.ago)
create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
+
+ skip_default_enabled_yaml_check
+ stub_feature_flags(ci_runner_limits_override: ci_runner_limits_override)
end
- it 'does not create runner' do
- request
+ context 'with ci_runner_limits_override FF disabled' do
+ let(:ci_runner_limits_override) { false }
+
+ it 'does not create runner' do
+ request
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to include('runner_projects.base' => ['Maximum number of ci registered project runners (1) exceeded'])
- expect(project.runners.reload.size).to eq(1)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include('runner_projects.base' => ['Maximum number of ci registered project runners (1) exceeded'])
+ expect(project.runners.reload.size).to eq(1)
+ end
+ end
+
+ context 'with ci_runner_limits_override FF enabled' do
+ let(:ci_runner_limits_override) { true }
+
+ it 'creates runner' do
+ request
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['message']).to be_nil
+ expect(project.runners.reload.size).to eq(2)
+ end
end
end
@@ -113,6 +132,9 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
before do
create(:ci_runner, runner_type: :project_type, projects: [project], created_at: 14.months.ago, contacted_at: 13.months.ago)
create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
+
+ skip_default_enabled_yaml_check
+ stub_feature_flags(ci_runner_limits_override: false)
end
it 'creates runner' do
@@ -182,14 +204,33 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
before do
create(:ci_runner, runner_type: :group_type, groups: [group], contacted_at: nil, created_at: 1.month.ago)
create(:plan_limits, :default_plan, ci_registered_group_runners: 1)
+
+ skip_default_enabled_yaml_check
+ stub_feature_flags(ci_runner_limits_override: ci_runner_limits_override)
end
- it 'does not create runner' do
- request
+ context 'with ci_runner_limits_override FF disabled' do
+ let(:ci_runner_limits_override) { false }
+
+ it 'does not create runner' do
+ request
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to include('runner_namespaces.base' => ['Maximum number of ci registered group runners (1) exceeded'])
- expect(group.runners.reload.size).to eq(1)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include('runner_namespaces.base' => ['Maximum number of ci registered group runners (1) exceeded'])
+ expect(group.runners.reload.size).to eq(1)
+ end
+ end
+
+ context 'with ci_runner_limits_override FF enabled' do
+ let(:ci_runner_limits_override) { true }
+
+ it 'creates runner' do
+ request
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['message']).to be_nil
+ expect(group.runners.reload.size).to eq(2)
+ end
end
end
@@ -198,6 +239,9 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
create(:ci_runner, runner_type: :group_type, groups: [group], created_at: 4.months.ago, contacted_at: 3.months.ago)
create(:ci_runner, runner_type: :group_type, groups: [group], contacted_at: nil, created_at: 4.months.ago)
create(:plan_limits, :default_plan, ci_registered_group_runners: 1)
+
+ skip_default_enabled_yaml_check
+ stub_feature_flags(ci_runner_limits_override: false)
end
it 'creates runner' do
diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb
index 82fb4440429..902938d7d02 100644
--- a/spec/requests/api/ci/runners_spec.rb
+++ b/spec/requests/api/ci/runners_spec.rb
@@ -1003,13 +1003,31 @@ RSpec.describe API::Ci::Runners do
context 'when it exceeds the application limits' do
before do
create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
+
+ skip_default_enabled_yaml_check
+ stub_feature_flags(ci_runner_limits_override: ci_runner_limits_override)
end
- it 'does not enable specific runner' do
- expect do
- post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id }
- end.not_to change { project.runners.count }
- expect(response).to have_gitlab_http_status(:bad_request)
+ context 'with ci_runner_limits_override FF disabled' do
+ let(:ci_runner_limits_override) { false }
+
+ it 'does not enable specific runner' do
+ expect do
+ post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id }
+ end.not_to change { project.runners.count }
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'with ci_runner_limits_override FF enabled' do
+ let(:ci_runner_limits_override) { true }
+
+ it 'enables specific runner' do
+ expect do
+ post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id }
+ end.to change { project.runners.count }
+ expect(response).to have_gitlab_http_status(:created)
+ end
end
end
end
diff --git a/spec/requests/api/triggers_spec.rb b/spec/requests/api/ci/triggers_spec.rb
index 4318f106996..410e2ae405e 100644
--- a/spec/requests/api/triggers_spec.rb
+++ b/spec/requests/api/ci/triggers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Triggers do
+RSpec.describe API::Ci::Triggers do
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
diff --git a/spec/requests/api/variables_spec.rb b/spec/requests/api/ci/variables_spec.rb
index 1ae9b0d548d..dc524e121d5 100644
--- a/spec/requests/api/variables_spec.rb
+++ b/spec/requests/api/ci/variables_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Variables do
+RSpec.describe API::Ci::Variables do
let(:user) { create(:user) }
let(:user2) { create(:user) }
let!(:project) { create(:project, creator_id: user.id) }
diff --git a/spec/requests/api/debian_group_packages_spec.rb b/spec/requests/api/debian_group_packages_spec.rb
index 931eaf41891..3e11b480860 100644
--- a/spec/requests/api/debian_group_packages_spec.rb
+++ b/spec/requests/api/debian_group_packages_spec.rb
@@ -6,10 +6,16 @@ RSpec.describe API::DebianGroupPackages do
include WorkhorseHelpers
include_context 'Debian repository shared context', :group, false do
+ context 'with invalid parameter' do
+ let(:url) { "/groups/1/-/packages/debian/dists/with+space/InRelease" }
+
+ it_behaves_like 'Debian repository GET request', :bad_request, /^distribution is invalid$/
+ end
+
describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release.gpg' do
let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/Release.gpg" }
- it_behaves_like 'Debian repository read endpoint', 'GET request', :not_found
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^-----BEGIN PGP SIGNATURE-----/
end
describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release' do
@@ -21,7 +27,7 @@ RSpec.describe API::DebianGroupPackages do
describe 'GET groups/:id/-/packages/debian/dists/*distribution/InRelease' do
let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/InRelease" }
- it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^Codename: fixture-distribution\n$/
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^-----BEGIN PGP SIGNED MESSAGE-----/
end
describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
@@ -30,10 +36,25 @@ RSpec.describe API::DebianGroupPackages do
it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /Description: This is an incomplete Packages file/
end
- describe 'GET groups/:id/-/packages/debian/pool/:component/:letter/:source_package/:file_name' do
- let(:url) { "/groups/#{container.id}/-/packages/debian/pool/#{component.name}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture.name}.deb" }
+ describe 'GET groups/:id/-/packages/debian/pool/:codename/:project_id/:letter/:package_name/:package_version/:file_name' do
+ let(:url) { "/groups/#{container.id}/-/packages/debian/pool/#{package.debian_distribution.codename}/#{project.id}/#{letter}/#{package.name}/#{package.version}/#{file_name}" }
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:file_name, :success_body) do
+ 'sample_1.2.3~alpha2.tar.xz' | /^.7zXZ/
+ 'sample_1.2.3~alpha2.dsc' | /^Format: 3.0 \(native\)/
+ 'libsample0_1.2.3~alpha2_amd64.deb' | /^!<arch>/
+ 'sample-udeb_1.2.3~alpha2_amd64.udeb' | /^!<arch>/
+ 'sample_1.2.3~alpha2_amd64.buildinfo' | /Build-Tainted-By/
+ 'sample_1.2.3~alpha2_amd64.changes' | /urgency=medium/
+ end
+
+ with_them do
+ include_context 'with file_name', params[:file_name]
- it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^TODO File$/
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, params[:success_body]
+ end
end
end
end
diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb
index fb7da467322..d0b0debaf13 100644
--- a/spec/requests/api/debian_project_packages_spec.rb
+++ b/spec/requests/api/debian_project_packages_spec.rb
@@ -6,10 +6,16 @@ RSpec.describe API::DebianProjectPackages do
include WorkhorseHelpers
include_context 'Debian repository shared context', :project, true do
+ context 'with invalid parameter' do
+ let(:url) { "/projects/1/packages/debian/dists/with+space/InRelease" }
+
+ it_behaves_like 'Debian repository GET request', :bad_request, /^distribution is invalid$/
+ end
+
describe 'GET projects/:id/packages/debian/dists/*distribution/Release.gpg' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/Release.gpg" }
- it_behaves_like 'Debian repository read endpoint', 'GET request', :not_found
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^-----BEGIN PGP SIGNATURE-----/
end
describe 'GET projects/:id/packages/debian/dists/*distribution/Release' do
@@ -21,7 +27,7 @@ RSpec.describe API::DebianProjectPackages do
describe 'GET projects/:id/packages/debian/dists/*distribution/InRelease' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/InRelease" }
- it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^Codename: fixture-distribution\n$/
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^-----BEGIN PGP SIGNED MESSAGE-----/
end
describe 'GET projects/:id/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
@@ -30,10 +36,25 @@ RSpec.describe API::DebianProjectPackages do
it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /Description: This is an incomplete Packages file/
end
- describe 'GET projects/:id/packages/debian/pool/:component/:letter/:source_package/:file_name' do
- let(:url) { "/projects/#{container.id}/packages/debian/pool/#{component.name}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture.name}.deb" }
+ describe 'GET projects/:id/packages/debian/pool/:codename/:letter/:package_name/:package_version/:file_name' do
+ let(:url) { "/projects/#{container.id}/packages/debian/pool/#{package.debian_distribution.codename}/#{letter}/#{package.name}/#{package.version}/#{file_name}" }
+
+ using RSpec::Parameterized::TableSyntax
- it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^TODO File$/
+ where(:file_name, :success_body) do
+ 'sample_1.2.3~alpha2.tar.xz' | /^.7zXZ/
+ 'sample_1.2.3~alpha2.dsc' | /^Format: 3.0 \(native\)/
+ 'libsample0_1.2.3~alpha2_amd64.deb' | /^!<arch>/
+ 'sample-udeb_1.2.3~alpha2_amd64.udeb' | /^!<arch>/
+ 'sample_1.2.3~alpha2_amd64.buildinfo' | /Build-Tainted-By/
+ 'sample_1.2.3~alpha2_amd64.changes' | /urgency=medium/
+ end
+
+ with_them do
+ include_context 'with file_name', params[:file_name]
+
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, params[:success_body]
+ end
end
describe 'PUT projects/:id/packages/debian/:file_name' do
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index 5d40e8c529a..bc7bb7523c9 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -360,6 +360,8 @@ RSpec.describe API::Environments do
expect(json_response["scheduled_entries"].size).to eq(1)
expect(json_response["scheduled_entries"].first["id"]).to eq(old_stopped_review_env.id)
expect(json_response["unprocessable_entries"].size).to eq(0)
+ expect(json_response["scheduled_entries"]).to match_schema('public_api/v4/environments')
+ expect(json_response["unprocessable_entries"]).to match_schema('public_api/v4/environments')
expect(old_stopped_review_env.reload.auto_delete_at).to eq(1.week.from_now)
expect(new_stopped_review_env.reload.auto_delete_at).to be_nil
diff --git a/spec/requests/api/error_tracking_collector_spec.rb b/spec/requests/api/error_tracking_collector_spec.rb
index 52d63410e7a..4b186657c4a 100644
--- a/spec/requests/api/error_tracking_collector_spec.rb
+++ b/spec/requests/api/error_tracking_collector_spec.rb
@@ -4,15 +4,17 @@ require 'spec_helper'
RSpec.describe API::ErrorTrackingCollector do
let_it_be(:project) { create(:project, :private) }
- let_it_be(:setting) { create(:project_error_tracking_setting, project: project) }
+ let_it_be(:setting) { create(:project_error_tracking_setting, :integrated, project: project) }
+ let_it_be(:client_key) { create(:error_tracking_client_key, project: project) }
describe "POST /error_tracking/collector/api/:id/envelope" do
let_it_be(:raw_event) { fixture_file('error_tracking/event.txt') }
let_it_be(:url) { "/error_tracking/collector/api/#{project.id}/envelope" }
let(:params) { raw_event }
+ let(:headers) { { 'X-Sentry-Auth' => "Sentry sentry_key=#{client_key.public_key}" } }
- subject { post api(url), params: params }
+ subject { post api(url), params: params, headers: headers }
RSpec.shared_examples 'not found' do
it 'reponds with 404' do
@@ -38,6 +40,14 @@ RSpec.describe API::ErrorTrackingCollector do
it_behaves_like 'not found'
end
+ context 'integrated error tracking is disabled' do
+ before do
+ setting.update!(integrated: false)
+ end
+
+ it_behaves_like 'not found'
+ end
+
context 'feature flag is disabled' do
before do
stub_feature_flags(integrated_error_tracking: false)
@@ -46,6 +56,24 @@ RSpec.describe API::ErrorTrackingCollector do
it_behaves_like 'not found'
end
+ context 'auth headers are missing' do
+ let(:headers) { {} }
+
+ it_behaves_like 'bad request'
+ end
+
+ context 'public key is wrong' do
+ let(:headers) { { 'X-Sentry-Auth' => "Sentry sentry_key=glet_1fedb514e17f4b958435093deb02048c" } }
+
+ it_behaves_like 'not found'
+ end
+
+ context 'public key is inactive' do
+ let(:client_key) { create(:error_tracking_client_key, :disabled, project: project) }
+
+ it_behaves_like 'not found'
+ end
+
context 'empty body' do
let(:params) { '' }
diff --git a/spec/requests/api/error_tracking_spec.rb b/spec/requests/api/error_tracking_spec.rb
index 39121af7bc3..ec9a3378acc 100644
--- a/spec/requests/api/error_tracking_spec.rb
+++ b/spec/requests/api/error_tracking_spec.rb
@@ -17,7 +17,8 @@ RSpec.describe API::ErrorTracking do
'active' => setting.reload.enabled,
'project_name' => setting.project_name,
'sentry_external_url' => setting.sentry_external_url,
- 'api_url' => setting.api_url
+ 'api_url' => setting.api_url,
+ 'integrated' => setting.integrated
)
end
end
@@ -79,6 +80,19 @@ RSpec.describe API::ErrorTracking do
.to eq('active is empty')
end
end
+
+ context 'with integrated param' do
+ let(:params) { { active: true, integrated: true } }
+
+ it 'updates the integrated flag' do
+ expect(setting.integrated).to be_falsey
+
+ make_request
+
+ expect(json_response).to include('integrated' => true)
+ expect(setting.reload.integrated).to be_truthy
+ end
+ end
end
context 'without a project setting' do
diff --git a/spec/requests/api/feature_flags_spec.rb b/spec/requests/api/feature_flags_spec.rb
index 8edf8825fb2..8c8c6803a38 100644
--- a/spec/requests/api/feature_flags_spec.rb
+++ b/spec/requests/api/feature_flags_spec.rb
@@ -417,7 +417,7 @@ RSpec.describe API::FeatureFlags do
version: 'new_version_flag',
strategies: [{
name: 'flexibleRollout',
- parameters: { groupId: 'default', rollout: '50', stickiness: 'DEFAULT' },
+ parameters: { groupId: 'default', rollout: '50', stickiness: 'default' },
scopes: [{
environment_scope: 'staging'
}]
@@ -434,7 +434,7 @@ RSpec.describe API::FeatureFlags do
expect(feature_flag.version).to eq('new_version_flag')
expect(feature_flag.strategies.map { |s| s.slice(:name, :parameters).deep_symbolize_keys }).to eq([{
name: 'flexibleRollout',
- parameters: { groupId: 'default', rollout: '50', stickiness: 'DEFAULT' }
+ parameters: { groupId: 'default', rollout: '50', stickiness: 'default' }
}])
expect(feature_flag.strategies.first.scopes.map { |s| s.slice(:environment_scope).deep_symbolize_keys }).to eq([{
environment_scope: 'staging'
@@ -630,7 +630,7 @@ RSpec.describe API::FeatureFlags do
strategies: [{
id: strategy.id,
name: 'flexibleRollout',
- parameters: { groupId: 'default', rollout: '10', stickiness: 'DEFAULT' }
+ parameters: { groupId: 'default', rollout: '10', stickiness: 'default' }
}]
}
@@ -642,7 +642,7 @@ RSpec.describe API::FeatureFlags do
expect(result).to eq([{
id: strategy.id,
name: 'flexibleRollout',
- parameters: { groupId: 'default', rollout: '10', stickiness: 'DEFAULT' }
+ parameters: { groupId: 'default', rollout: '10', stickiness: 'default' }
}])
end
@@ -677,7 +677,7 @@ RSpec.describe API::FeatureFlags do
params = {
strategies: [{
name: 'flexibleRollout',
- parameters: { groupId: 'default', rollout: '10', stickiness: 'DEFAULT' }
+ parameters: { groupId: 'default', rollout: '10', stickiness: 'default' }
}]
}
@@ -694,7 +694,7 @@ RSpec.describe API::FeatureFlags do
parameters: {}
}, {
name: 'flexibleRollout',
- parameters: { groupId: 'default', rollout: '10', stickiness: 'DEFAULT' }
+ parameters: { groupId: 'default', rollout: '10', stickiness: 'default' }
}])
end
diff --git a/spec/requests/api/generic_packages_spec.rb b/spec/requests/api/generic_packages_spec.rb
index 378ee2f3e7c..4091253fb54 100644
--- a/spec/requests/api/generic_packages_spec.rb
+++ b/spec/requests/api/generic_packages_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe API::GenericPackages do
let_it_be(:project_deploy_token_wo) { create(:project_deploy_token, deploy_token: deploy_token_wo, project: project) }
let(:user) { personal_access_token.user }
- let(:ci_build) { create(:ci_build, :running, user: user, project: project) }
+ let(:ci_build) { create(:ci_build, :running, user: user) }
let(:snowplow_standard_context_params) { { user: user, project: project, namespace: project.namespace } }
def auth_header
diff --git a/spec/requests/api/go_proxy_spec.rb b/spec/requests/api/go_proxy_spec.rb
index 0143340de11..e678b6cf1c8 100644
--- a/spec/requests/api/go_proxy_spec.rb
+++ b/spec/requests/api/go_proxy_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe API::GoProxy do
let_it_be(:base) { "#{Settings.build_gitlab_go_url}/#{project.full_path}" }
let_it_be(:oauth) { create :oauth_access_token, scopes: 'api', resource_owner: user }
- let_it_be(:job) { create :ci_build, user: user, status: :running, project: project }
+ let_it_be(:job) { create :ci_build, user: user, status: :running }
let_it_be(:pa_token) { create :personal_access_token, user: user }
let_it_be(:modules) do
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
index 10f05efa1b8..e6362fdde88 100644
--- a/spec/requests/api/graphql/ci/jobs_spec.rb
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -117,14 +117,19 @@ RSpec.describe 'Query.project.pipeline' do
)
end
- it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new do
- post_graphql(query, current_user: user, variables: first_n.with(1))
+ it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
+ post_graphql(query, current_user: user)
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ post_graphql(query, current_user: user)
end
+ create(:ci_build, name: 'test-a', pipeline: pipeline)
+ create(:ci_build, name: 'test-b', pipeline: pipeline)
+
expect do
- post_graphql(query, current_user: user, variables: first_n.with(3))
- end.not_to exceed_query_limit(control_count)
+ post_graphql(query, current_user: user)
+ end.not_to exceed_all_query_limit(control)
end
end
end
@@ -137,11 +142,19 @@ RSpec.describe 'Query.project.pipeline' do
query {
project(fullPath: "#{project.full_path}") {
pipeline(iid: "#{pipeline.iid}") {
- jobs {
+ stages {
nodes {
- artifacts {
+ groups{
nodes {
- downloadPath
+ jobs {
+ nodes {
+ artifacts {
+ nodes {
+ downloadPath
+ }
+ }
+ }
+ }
}
}
}
@@ -158,7 +171,7 @@ RSpec.describe 'Query.project.pipeline' do
post_graphql(query, current_user: user)
- job_data = graphql_data.dig('project', 'pipeline', 'jobs', 'nodes').first
+ job_data = graphql_data_at(:project, :pipeline, :stages, :nodes, :groups, :nodes, :jobs, :nodes).first
expect(job_data.dig('artifacts', 'nodes').count).to be(2)
end
end
@@ -169,7 +182,7 @@ RSpec.describe 'Query.project.pipeline' do
post_graphql(query, current_user: user)
- job_data = graphql_data.dig('project', 'pipeline', 'jobs', 'nodes').first
+ job_data = graphql_data_at(:project, :pipeline, :stages, :nodes, :groups, :nodes, :jobs, :nodes).first
expect(job_data['artifacts']).to be_nil
end
end
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index cdd46ca4ecc..74547196445 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -52,14 +52,14 @@ RSpec.describe 'Query.runner(id)' do
'version' => runner.version,
'shortSha' => runner.short_sha,
'revision' => runner.revision,
- 'locked' => runner.locked,
+ 'locked' => false,
'active' => runner.active,
'status' => runner.status.to_s.upcase,
'maximumTimeout' => runner.maximum_timeout,
'accessLevel' => runner.access_level.to_s.upcase,
'runUntagged' => runner.run_untagged,
'ipAddress' => runner.ip_address,
- 'runnerType' => 'INSTANCE_TYPE',
+ 'runnerType' => runner.instance_type? ? 'INSTANCE_TYPE' : 'PROJECT_TYPE',
'jobCount' => 0,
'projectCount' => nil
)
@@ -109,6 +109,40 @@ RSpec.describe 'Query.runner(id)' do
end
end
+ describe 'for project runner' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(is_locked: [true, false])
+
+ with_them do
+ let(:project_runner) do
+ create(:ci_runner, :project, description: 'Runner 3', contacted_at: 1.day.ago, active: false, locked: is_locked,
+ version: 'adfe157', revision: 'b', ip_address: '10.10.10.10', access_level: 1, run_untagged: true)
+ end
+
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, all_graphql_fields_for('CiRunner')))
+ end
+
+ let(:query_path) do
+ [
+ [:runner, { id: project_runner.to_global_id.to_s }]
+ ]
+ end
+
+ it 'retrieves correct locked value' do
+ post_graphql(query, current_user: user)
+
+ runner_data = graphql_data_at(:runner)
+
+ expect(runner_data).to match a_hash_including(
+ 'id' => "gid://gitlab/Ci::Runner/#{project_runner.id}",
+ 'locked' => is_locked
+ )
+ end
+ end
+ end
+
describe 'for inactive runner' do
it_behaves_like 'runner details fetch', :inactive_instance_runner
end
diff --git a/spec/requests/api/graphql/current_user_query_spec.rb b/spec/requests/api/graphql/current_user_query_spec.rb
index dc832b42fa5..086a57094ca 100644
--- a/spec/requests/api/graphql/current_user_query_spec.rb
+++ b/spec/requests/api/graphql/current_user_query_spec.rb
@@ -5,8 +5,15 @@ require 'spec_helper'
RSpec.describe 'getting project information' do
include GraphqlHelpers
+ let(:fields) do
+ <<~GRAPHQL
+ name
+ namespace { id }
+ GRAPHQL
+ end
+
let(:query) do
- graphql_query_for('currentUser', {}, 'name')
+ graphql_query_for('currentUser', {}, fields)
end
subject { graphql_data['currentUser'] }
@@ -20,7 +27,7 @@ RSpec.describe 'getting project information' do
it_behaves_like 'a working graphql query'
- it { is_expected.to include('name' => current_user.name) }
+ it { is_expected.to include('name' => current_user.name, 'namespace' => { 'id' => current_user.namespace.to_global_id.to_s }) }
end
context 'when there is no current_user' do
diff --git a/spec/requests/api/graphql/group_query_spec.rb b/spec/requests/api/graphql/group_query_spec.rb
index b6bbf8d5dd2..fd0ee5d52b9 100644
--- a/spec/requests/api/graphql/group_query_spec.rb
+++ b/spec/requests/api/graphql/group_query_spec.rb
@@ -8,11 +8,11 @@ RSpec.describe 'getting group information' do
include GraphqlHelpers
include UploadHelpers
- let(:user1) { create(:user, can_create_group: false) }
- let(:user2) { create(:user) }
- let(:admin) { create(:admin) }
- let(:public_group) { create(:group, :public) }
- let(:private_group) { create(:group, :private) }
+ let_it_be(:user1) { create(:user, can_create_group: false) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:private_group) { create(:group, :private) }
+ let_it_be(:public_group) { create(:group, :public) }
# similar to the API "GET /groups/:id"
describe "Query group(fullPath)" do
@@ -78,6 +78,7 @@ RSpec.describe 'getting group information' do
expect(graphql_data['group']['parentId']).to eq(group1.parent_id)
expect(graphql_data['group']['issues']['nodes'].count).to eq(1)
expect(graphql_data['group']['issues']['nodes'][0]['iid']).to eq(issue.iid.to_s)
+ expect(graphql_data['group']['sharedRunnersSetting']).to eq(group1.shared_runners_setting.upcase)
end
it "does not return a non existing group" do
@@ -105,6 +106,20 @@ RSpec.describe 'getting group information' do
expect { post_multiplex(queries, current_user: admin) }
.to issue_same_number_of_queries_as { post_graphql(group_query(group1), current_user: admin) }
end
+
+ context "when querying group's descendant groups" do
+ let_it_be(:subgroup1) { create(:group, parent: public_group) }
+ let_it_be(:subgroup2) { create(:group, parent: subgroup1) }
+
+ let(:descendants) { [subgroup1, subgroup2] }
+
+ it 'returns all descendant groups the user has access to' do
+ post_graphql(group_query(public_group), current_user: admin)
+
+ names = graphql_data['group']['descendantGroups']['nodes'].map { |n| n['name'] }
+ expect(names).to match_array(descendants.map(&:name))
+ end
+ end
end
context "when authenticated as admin" do
diff --git a/spec/requests/api/graphql/mutations/ci/job_cancel_spec.rb b/spec/requests/api/graphql/mutations/ci/job_cancel_spec.rb
new file mode 100644
index 00000000000..ee0f0a9bccb
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/job_cancel_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "JobCancel" do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let_it_be(:job) { create(:ci_build, pipeline: pipeline, name: 'build') }
+
+ let(:mutation) do
+ variables = {
+ id: job.to_global_id.to_s
+ }
+ graphql_mutation(:job_cancel, variables,
+ <<-QL
+ errors
+ job {
+ id
+ }
+ QL
+ )
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:job_cancel) }
+
+ it 'returns an error if the user is not allowed to cancel the job' do
+ project.add_developer(user)
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ it 'cancels a job' do
+ job_id = ::Gitlab::GlobalId.build(job, id: job.id).to_s
+ project.add_maintainer(user)
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['job']['id']).to eq(job_id)
+ expect(job.reload.status).to eq('canceled')
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/job_unschedule_spec.rb b/spec/requests/api/graphql/mutations/ci/job_unschedule_spec.rb
new file mode 100644
index 00000000000..4ddc019a2b5
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/job_unschedule_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'JobUnschedule' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let_it_be(:job) { create(:ci_build, :scheduled, pipeline: pipeline, name: 'build') }
+
+ let(:mutation) do
+ variables = {
+ id: job.to_global_id.to_s
+ }
+ graphql_mutation(:job_unschedule, variables,
+ <<-QL
+ errors
+ job {
+ id
+ }
+ QL
+ )
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:job_unschedule) }
+
+ it 'returns an error if the user is not allowed to unschedule the job' do
+ project.add_developer(user)
+
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_errors).not_to be_empty
+ expect(job.reload.status).to eq('scheduled')
+ end
+
+ it 'unschedules a job' do
+ project.add_maintainer(user)
+
+ job_id = ::Gitlab::GlobalId.build(job, id: job.id).to_s
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['job']['id']).to eq(job_id)
+ expect(job.reload.status).to eq('manual')
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/groups/update_spec.rb b/spec/requests/api/graphql/mutations/groups/update_spec.rb
new file mode 100644
index 00000000000..b9dfb8e37ab
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/groups/update_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'GroupUpdate' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:group) { create(:group) }
+
+ let(:variables) do
+ {
+ full_path: group.full_path,
+ shared_runners_setting: 'DISABLED_WITH_OVERRIDE'
+ }
+ end
+
+ let(:mutation) { graphql_mutation(:group_update, variables) }
+
+ context 'when unauthorized' do
+ shared_examples 'unauthorized' do
+ it 'returns an error' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_errors).not_to be_empty
+ end
+ end
+
+ context 'when not a group member' do
+ it_behaves_like 'unauthorized'
+ end
+
+ context 'when a non-admin group member' do
+ before do
+ group.add_developer(user)
+ end
+
+ it_behaves_like 'unauthorized'
+ end
+ end
+
+ context 'when authorized' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'updates shared runners settings' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(graphql_errors).to be_nil
+ expect(group.reload.shared_runners_setting).to eq(variables[:shared_runners_setting].downcase)
+ end
+
+ context 'when bad arguments are provided' do
+ let(:variables) { { full_path: '', shared_runners_setting: 'INVALID' } }
+
+ it 'returns the errors' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_errors).not_to be_empty
+ expect(group.reload.shared_runners_setting).to eq('enabled')
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb b/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
index ea5be9f9852..72e47a98373 100644
--- a/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe 'Setting Due Date of an issue' do
it 'returns an error' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(graphql_errors).to include(a_hash_including('message' => /Argument dueDate must be provided/))
+ expect(graphql_errors).to include(a_hash_including('message' => /Arguments must be provided: dueDate/))
end
end
diff --git a/spec/requests/api/graphql/mutations/issues/update_spec.rb b/spec/requests/api/graphql/mutations/issues/update_spec.rb
index b3e1ab62e54..c3aaf090703 100644
--- a/spec/requests/api/graphql/mutations/issues/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/update_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe 'Update of an existing issue' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:label1) { create(:label, project: project) }
+ let_it_be(:label2) { create(:label, project: project) }
let(:input) do
{
@@ -20,7 +22,9 @@ RSpec.describe 'Update of an existing issue' do
}
end
- let(:mutation) { graphql_mutation(:update_issue, input.merge(project_path: project.full_path, locked: true)) }
+ let(:extra_params) { { project_path: project.full_path, locked: true } }
+ let(:input_params) { input.merge(extra_params) }
+ let(:mutation) { graphql_mutation(:update_issue, input_params) }
let(:mutation_response) { graphql_mutation_response(:update_issue) }
context 'the user is not allowed to update issue' do
@@ -39,5 +43,82 @@ RSpec.describe 'Update of an existing issue' do
expect(mutation_response['issue']).to include(input)
expect(mutation_response['issue']).to include('discussionLocked' => true)
end
+
+ context 'setting labels' do
+ let(:mutation) do
+ graphql_mutation(:update_issue, input_params) do
+ <<~QL
+ issue {
+ labels {
+ nodes {
+ id
+ }
+ }
+ }
+ errors
+ QL
+ end
+ end
+
+ context 'reset labels' do
+ let(:input_params) { input.merge(extra_params).merge({ labelIds: [label1.id, label2.id] }) }
+
+ it 'resets labels' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response['errors']).to be_nil
+ expect(mutation_response['issue']['labels']).to include({ "nodes" => [{ "id" => label1.to_global_id.to_s }, { "id" => label2.to_global_id.to_s }] })
+ end
+
+ context 'reset labels and add labels' do
+ let(:input_params) { input.merge(extra_params).merge({ labelIds: [label1.id], addLabelIds: [label2.id] }) }
+
+ it 'returns error for mutually exclusive arguments' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response['errors'].first['message']).to eq('labelIds is mutually exclusive with any of addLabelIds or removeLabelIds')
+ expect(mutation_response).to be_nil
+ end
+ end
+
+ context 'reset labels and remove labels' do
+ let(:input_params) { input.merge(extra_params).merge({ labelIds: [label1.id], removeLabelIds: [label2.id] }) }
+
+ it 'returns error for mutually exclusive arguments' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response['errors'].first['message']).to eq('labelIds is mutually exclusive with any of addLabelIds or removeLabelIds')
+ expect(mutation_response).to be_nil
+ end
+ end
+
+ context 'with global label ids' do
+ let(:input_params) { input.merge(extra_params).merge({ labelIds: [label1.to_global_id.to_s, label2.to_global_id.to_s] }) }
+
+ it 'resets labels' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response['errors']).to be_nil
+ expect(mutation_response['issue']['labels']).to include({ "nodes" => [{ "id" => label1.to_global_id.to_s }, { "id" => label2.to_global_id.to_s }] })
+ end
+ end
+ end
+
+ context 'add and remove labels' do
+ let(:input_params) { input.merge(extra_params).merge({ addLabelIds: [label1.id], removeLabelIds: [label2.id] }) }
+
+ it 'returns error for mutually exclusive arguments' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response['errors']).to be_nil
+ expect(mutation_response['issue']['labels']).to include({ "nodes" => [{ "id" => label1.to_global_id.to_s }] })
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/graphql/mutations/packages/destroy_file_spec.rb b/spec/requests/api/graphql/mutations/packages/destroy_file_spec.rb
new file mode 100644
index 00000000000..7be629f8f4b
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/packages/destroy_file_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Destroying a package file' do
+ using RSpec::Parameterized::TableSyntax
+
+ include GraphqlHelpers
+
+ let_it_be_with_reload(:package) { create(:maven_package) }
+ let_it_be(:user) { create(:user) }
+
+ let(:project) { package.project }
+ let(:id) { package.package_files.first.to_global_id.to_s }
+
+ let(:query) do
+ <<~GQL
+ errors
+ GQL
+ end
+
+ let(:params) { { id: id } }
+ let(:mutation) { graphql_mutation(:destroy_package_file, params, query) }
+ let(:mutation_response) { graphql_mutation_response(:destroyPackageFile) }
+
+ shared_examples 'destroying the package file' do
+ it 'destroys the package file' do
+ expect { mutation_request }.to change { ::Packages::PackageFile.count }.by(-1)
+ end
+
+ it_behaves_like 'returning response status', :success
+ end
+
+ shared_examples 'denying the mutation request' do
+ it 'does not destroy the package file' do
+ expect(::Packages::PackageFile)
+ .not_to receive(:destroy)
+
+ expect { mutation_request }.not_to change { ::Packages::PackageFile.count }
+
+ expect(mutation_response).to be_nil
+ end
+
+ it_behaves_like 'returning response status', :success
+ end
+
+ describe 'post graphql mutation' do
+ subject(:mutation_request) { post_graphql_mutation(mutation, current_user: user) }
+
+ context 'with valid id' do
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'destroying the package file'
+ :developer | 'denying the mutation request'
+ :reporter | 'denying the mutation request'
+ :guest | 'denying the mutation request'
+ :anonymous | 'denying the mutation request'
+ end
+
+ with_them do
+ before do
+ project.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+
+ context 'with invalid id' do
+ let(:params) { { id: 'gid://gitlab/Packages::PackageFile/5555' } }
+
+ it_behaves_like 'denying the mutation request'
+ end
+
+ context 'when an error occurs' do
+ let(:error_messages) { ['some error'] }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns the errors in the response' do
+ allow_next_found_instance_of(::Packages::PackageFile) do |package_file|
+ allow(package_file).to receive(:destroy).and_return(false)
+ allow(package_file).to receive_message_chain(:errors, :full_messages).and_return(error_messages)
+ end
+
+ mutation_request
+
+ expect(mutation_response['errors']).to eq(error_messages)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb b/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
index 43d846cb297..77fd6cddc09 100644
--- a/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe 'Mark snippet as spam' do
end
it 'marks snippet as spam' do
- expect_next(Spam::MarkAsSpamService, target: snippet)
+ expect_next(Spam::AkismetMarkAsSpamService, target: snippet)
.to receive(:execute).and_return(true)
post_graphql_mutation(mutation, current_user: current_user)
diff --git a/spec/requests/api/graphql/packages/nuget_spec.rb b/spec/requests/api/graphql/packages/nuget_spec.rb
index 1de16009684..ba8d2ca42d2 100644
--- a/spec/requests/api/graphql/packages/nuget_spec.rb
+++ b/spec/requests/api/graphql/packages/nuget_spec.rb
@@ -6,8 +6,11 @@ RSpec.describe 'nuget package details' do
include_context 'package details setup'
let_it_be(:package) { create(:nuget_package, :with_metadatum, project: project) }
+ let_it_be(:dependency_link) { create(:packages_dependency_link, :with_nuget_metadatum, package: package) }
let(:metadata) { query_graphql_fragment('NugetMetadata') }
+ let(:dependency_link_response) { graphql_data_at(:package, :dependency_links, :nodes, 0) }
+ let(:dependency_response) { graphql_data_at(:package, :dependency_links, :nodes, 0, :dependency) }
subject { post_graphql(query, current_user: user) }
@@ -26,4 +29,34 @@ RSpec.describe 'nuget package details' do
'iconUrl' => package.nuget_metadatum.icon_url
)
end
+
+ it 'has dependency links' do
+ expect(dependency_link_response).to include(
+ 'id' => global_id_of(dependency_link),
+ 'dependencyType' => dependency_link.dependency_type.upcase
+ )
+
+ expect(dependency_response).to include(
+ 'id' => global_id_of(dependency_link.dependency),
+ 'name' => dependency_link.dependency.name,
+ 'versionPattern' => dependency_link.dependency.version_pattern
+ )
+ end
+
+ it 'avoids N+1 queries' do
+ first_user = create(:user)
+ second_user = create(:user)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ post_graphql(query, current_user: first_user)
+ end
+
+ create_list(:packages_dependency_link, 10, :with_nuget_metadatum, package: package)
+
+ expect do
+ post_graphql(query, current_user: second_user)
+ end.not_to exceed_query_limit(control_count)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
diff --git a/spec/requests/api/graphql/project/alert_management/alert/issue_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/issue_spec.rb
index 05a98a9dd9c..29896c16f5b 100644
--- a/spec/requests/api/graphql/project/alert_management/alert/issue_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert/issue_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe 'getting Alert Management Alert Issue' do
context 'with gitlab alert' do
before do
- create(:alert_management_alert, :with_issue, project: project, payload: payload)
+ create(:alert_management_alert, :with_incident, project: project, payload: payload)
end
it 'includes the correct alert issue payload data' do
@@ -57,7 +57,7 @@ RSpec.describe 'getting Alert Management Alert Issue' do
context 'with gitlab alert' do
before do
- create(:alert_management_alert, :with_issue, project: project, payload: payload)
+ create(:alert_management_alert, :with_incident, project: project, payload: payload)
end
it 'avoids N+1 queries' do
diff --git a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
index 14fabaaf032..40a3281d3b7 100644
--- a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
+++ b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'getting a detailed sentry error' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:project_setting) { create(:project_error_tracking_setting, project: project) }
let_it_be(:current_user) { project.owner }
- let_it_be(:sentry_detailed_error) { build(:detailed_error_tracking_error) }
+ let_it_be(:sentry_detailed_error) { build(:error_tracking_sentry_detailed_error) }
let(:sentry_gid) { sentry_detailed_error.to_global_id.to_s }
let(:fields) do
diff --git a/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb b/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
index e71e5a48ddc..80376f56ee8 100644
--- a/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
+++ b/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'sentry errors requests' do
end
describe 'getting a detailed sentry error' do
- let_it_be(:sentry_detailed_error) { build(:detailed_error_tracking_error) }
+ let_it_be(:sentry_detailed_error) { build(:error_tracking_sentry_detailed_error) }
let(:sentry_gid) { sentry_detailed_error.to_global_id.to_s }
@@ -97,7 +97,7 @@ RSpec.describe 'sentry errors requests' do
end
describe 'getting an errors list' do
- let_it_be(:sentry_error) { build(:error_tracking_error) }
+ let_it_be(:sentry_error) { build(:error_tracking_sentry_error) }
let_it_be(:pagination) do
{
'next' => { 'cursor' => '2222' },
@@ -193,7 +193,7 @@ RSpec.describe 'sentry errors requests' do
end
describe 'getting a stack trace' do
- let_it_be(:sentry_stack_trace) { build(:error_tracking_error_event) }
+ let_it_be(:sentry_stack_trace) { build(:error_tracking_sentry_error_event) }
let(:sentry_gid) { global_id_of(Gitlab::ErrorTracking::DetailedError.new(id: 1)) }
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index dd9d44136e5..ff0d7ecceb5 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -323,7 +323,7 @@ RSpec.describe 'getting an issue list for a project' do
it 'avoids N+1 queries' do
control = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: current_user) }
- create(:alert_management_alert, :with_issue, project: project)
+ create(:alert_management_alert, :with_incident, project: project)
expect { post_graphql(query, current_user: current_user) }.not_to exceed_query_limit(control)
end
@@ -476,6 +476,17 @@ RSpec.describe 'getting an issue list for a project' do
include_examples 'N+1 query check'
end
+ context 'when requesting `merge_requests_count`' do
+ let(:requested_fields) { [:merge_requests_count] }
+
+ before do
+ create_list(:merge_requests_closing_issues, 2, issue: issue_a)
+ create_list(:merge_requests_closing_issues, 3, issue: issue_b)
+ end
+
+ include_examples 'N+1 query check'
+ end
+
context 'when requesting `timelogs`' do
let(:requested_fields) { 'timelogs { nodes { timeSpent } }' }
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index 7fc1ef05fa7..1b0405be09c 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -422,6 +422,46 @@ RSpec.describe 'getting merge request listings nested in a project' do
end
end
end
+
+ context 'when sorting by closed_at DESC' do
+ let(:sort_param) { :CLOSED_AT_DESC }
+ let(:expected_results) do
+ [
+ merge_request_b,
+ merge_request_d,
+ merge_request_c,
+ merge_request_e,
+ merge_request_a
+ ].map { |mr| global_id_of(mr) }
+ end
+
+ before do
+ five_days_ago = 5.days.ago
+
+ merge_request_d.metrics.update!(latest_closed_at: five_days_ago)
+
+ # same merged_at, the second order column will decide (merge_request.id)
+ merge_request_c.metrics.update!(latest_closed_at: five_days_ago)
+
+ merge_request_b.metrics.update!(latest_closed_at: 1.day.ago)
+ end
+
+ it_behaves_like 'sorted paginated query' do
+ let(:first_param) { 2 }
+ end
+
+ context 'when last parameter is given' do
+ let(:params) { graphql_args(sort: sort_param, last: 2) }
+ let(:page_info) { nil }
+
+ it 'takes the last 2 records' do
+ query = pagination_query(params)
+ post_graphql(query, current_user: current_user)
+
+ expect(results.map { |item| item["id"] }).to eq(expected_results.last(2))
+ end
+ end
+ end
end
context 'when only the count is requested' do
diff --git a/spec/requests/api/graphql/project/repository_spec.rb b/spec/requests/api/graphql/project/repository_spec.rb
index bddd300e27f..8810f2fa3d5 100644
--- a/spec/requests/api/graphql/project/repository_spec.rb
+++ b/spec/requests/api/graphql/project/repository_spec.rb
@@ -83,4 +83,26 @@ RSpec.describe 'getting a repository in a project' do
expect(graphql_data['project']['repository']).to be_nil
end
end
+
+ context 'when paginated tree requested' do
+ let(:fields) do
+ %(
+ paginatedTree {
+ nodes {
+ trees {
+ nodes {
+ path
+ }
+ }
+ }
+ }
+ )
+ end
+
+ it 'returns paginated tree' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data['project']['repository']['paginatedTree']).to be_present
+ end
+ end
end
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index 7b081bb7568..7d182a3414b 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -385,7 +385,7 @@ RSpec.describe 'GraphQL' do
context 'authenticated user' do
subject { post_graphql(query, current_user: user) }
- it 'does not raise an error as it uses the `AUTHENTICATED_COMPLEXITY`' do
+ it 'does not raise an error as it uses the `AUTHENTICATED_MAX_COMPLEXITY`' do
subject
expect(graphql_errors).to be_nil
diff --git a/spec/requests/api/group_debian_distributions_spec.rb b/spec/requests/api/group_debian_distributions_spec.rb
new file mode 100644
index 00000000000..ec1912b72bf
--- /dev/null
+++ b/spec/requests/api/group_debian_distributions_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe API::GroupDebianDistributions do
+ include HttpBasicAuthHelpers
+ include WorkhorseHelpers
+
+ include_context 'Debian repository shared context', :group, false do
+ describe 'POST groups/:id/-/debian_distributions' do
+ let(:method) { :post }
+ let(:url) { "/groups/#{container.id}/-/debian_distributions" }
+ let(:api_params) { { 'codename': 'my-codename' } }
+
+ it_behaves_like 'Debian repository write endpoint', 'POST distribution request', :created, /^{.*"codename":"my-codename",.*"components":\["main"\],.*"architectures":\["all","amd64"\]/, authenticate_non_public: false
+ end
+
+ describe 'GET groups/:id/-/debian_distributions' do
+ let(:url) { "/groups/#{container.id}/-/debian_distributions" }
+
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^\[{.*"codename":"existing-codename",.*"components":\["existing-component"\],.*"architectures":\["all","existing-arch"\]/, authenticate_non_public: false
+ end
+
+ describe 'GET groups/:id/-/debian_distributions/:codename' do
+ let(:url) { "/groups/#{container.id}/-/debian_distributions/#{distribution.codename}" }
+
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^{.*"codename":"existing-codename",.*"components":\["existing-component"\],.*"architectures":\["all","existing-arch"\]/, authenticate_non_public: false
+ end
+
+ describe 'PUT groups/:id/-/debian_distributions/:codename' do
+ let(:method) { :put }
+ let(:url) { "/groups/#{container.id}/-/debian_distributions/#{distribution.codename}" }
+ let(:api_params) { { suite: 'my-suite' } }
+
+ it_behaves_like 'Debian repository write endpoint', 'PUT distribution request', :success, /^{.*"codename":"existing-codename",.*"suite":"my-suite",/, authenticate_non_public: false
+ end
+
+ describe 'DELETE groups/:id/-/debian_distributions/:codename' do
+ let(:method) { :delete }
+ let(:url) { "/groups/#{container.id}/-/debian_distributions/#{distribution.codename}" }
+
+ it_behaves_like 'Debian repository maintainer write endpoint', 'DELETE distribution request', :success, /^{"message":"202 Accepted"}$/, authenticate_non_public: false
+ end
+ end
+end
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index ad7a2e3b1fb..30df47ccc41 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe API::Groups do
let_it_be(:user2) { create(:user) }
let_it_be(:user3) { create(:user) }
let_it_be(:admin) { create(:admin) }
- let_it_be(:group1) { create(:group, avatar: File.open(uploaded_image_temp_path)) }
+ let_it_be(:group1) { create(:group, path: 'some_path', avatar: File.open(uploaded_image_temp_path)) }
let_it_be(:group2) { create(:group, :private) }
let_it_be(:project1) { create(:project, namespace: group1) }
let_it_be(:project2) { create(:project, namespace: group2) }
@@ -63,6 +63,19 @@ RSpec.describe API::Groups do
end
end
+ shared_examples 'skips searching in full path' do
+ it 'does not find groups by full path' do
+ subgroup = create(:group, parent: parent, path: "#{parent.path}-subgroup")
+ create(:group, parent: parent, path: 'not_matching_path')
+
+ get endpoint, params: { search: parent.path }
+
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['id']).to eq(subgroup.id)
+ end
+ end
+
describe "GET /groups" do
context "when unauthenticated" do
it "returns public groups" do
@@ -406,6 +419,22 @@ RSpec.describe API::Groups do
expect(response_groups).to contain_exactly(group2.id, group3.id)
end
end
+
+ context 'when searching' do
+ let_it_be(:subgroup1) { create(:group, parent: group1, path: 'some_path') }
+
+ let(:response_groups) { json_response.map { |group| group['id'] } }
+
+ subject { get api('/groups', user1), params: { search: group1.path } }
+
+ it 'also finds groups with full path matching search param' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(response_groups).to match_array([group1.id, subgroup1.id])
+ end
+ end
end
describe "GET /groups/:id" do
@@ -936,23 +965,6 @@ RSpec.describe API::Groups do
expect(project_names).to eq(['Project', 'Test', 'Test Project'])
end
end
-
- context 'when `similarity_search` feature flag is off' do
- before do
- stub_feature_flags(similarity_search: false)
- end
-
- it 'returns items ordered by name' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response.length).to eq(2)
-
- project_names = json_response.map { |proj| proj['name'] }
- expect(project_names).to eq(['Test', 'Test Project'])
- end
- end
end
it "returns the group's projects with simple representation" do
@@ -1424,6 +1436,11 @@ RSpec.describe API::Groups do
expect(json_response.first).to include('statistics')
end
end
+
+ it_behaves_like 'skips searching in full path' do
+ let(:parent) { group1 }
+ let(:endpoint) { api("/groups/#{group1.id}/subgroups", user1) }
+ end
end
describe 'GET /groups/:id/descendant_groups' do
@@ -1558,6 +1575,11 @@ RSpec.describe API::Groups do
expect(json_response.first).to include('statistics')
end
end
+
+ it_behaves_like 'skips searching in full path' do
+ let(:parent) { group1 }
+ let(:endpoint) { api("/groups/#{group1.id}/descendant_groups", user1) }
+ end
end
describe "POST /groups" do
diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb
index f9f03c9e55c..76a4548df8a 100644
--- a/spec/requests/api/invitations_spec.rb
+++ b/spec/requests/api/invitations_spec.rb
@@ -152,6 +152,20 @@ RSpec.describe API::Invitations do
end
end
+ context 'with areas_of_focus', :snowplow do
+ it 'tracks the areas_of_focus from params' do
+ post invitations_url(source, maintainer),
+ params: { email: email, access_level: Member::DEVELOPER, areas_of_focus: 'Other' }
+
+ expect_snowplow_event(
+ category: 'Members::InviteService',
+ action: 'area_of_focus',
+ label: 'Other',
+ property: source.members.last.id.to_s
+ )
+ end
+ end
+
context 'with invite_source considerations', :snowplow do
let(:params) { { email: email, access_level: Member::DEVELOPER } }
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index d9f11b19e6e..c3fd02dad51 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe API::MavenPackages do
let_it_be(:package_file) { package.package_files.with_file_name_like('%.xml').first }
let_it_be(:jar_file) { package.package_files.with_file_name_like('%.jar').first }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
- let_it_be(:job, reload: true) { create(:ci_build, user: user, status: :running, project: project) }
+ let_it_be(:job, reload: true) { create(:ci_build, user: user, status: :running) }
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
let_it_be(:deploy_token_for_group) { create(:deploy_token, :group, read_package_registry: true, write_package_registry: true) }
@@ -217,6 +217,15 @@ RSpec.describe API::MavenPackages do
end
end
+ shared_examples 'successfully returning the file' do
+ it 'returns the file', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+ end
+
describe 'GET /api/v4/packages/maven/*path/:file_name' do
context 'a public project' do
subject { download_file(file_name: package_file.file_name) }
@@ -224,12 +233,7 @@ RSpec.describe API::MavenPackages do
shared_examples 'getting a file' do
it_behaves_like 'tracking the file download event'
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully returning the file'
it 'returns sha1 of the file' do
download_file(file_name: package_file.file_name + '.sha1')
@@ -260,12 +264,7 @@ RSpec.describe API::MavenPackages do
shared_examples 'getting a file' do
it_behaves_like 'tracking the file download event'
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully returning the file'
it 'denies download when no private token' do
download_file(file_name: package_file.file_name)
@@ -297,12 +296,7 @@ RSpec.describe API::MavenPackages do
shared_examples 'getting a file' do
it_behaves_like 'tracking the file download event'
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully returning the file'
it 'denies download when not enough permissions' do
unless project.root_namespace == user.namespace
@@ -409,12 +403,7 @@ RSpec.describe API::MavenPackages do
shared_examples 'getting a file for a group' do
it_behaves_like 'tracking the file download event'
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully returning the file'
it 'returns sha1 of the file' do
download_file(file_name: package_file.file_name + '.sha1')
@@ -445,12 +434,7 @@ RSpec.describe API::MavenPackages do
shared_examples 'getting a file for a group' do
it_behaves_like 'tracking the file download event'
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully returning the file'
it 'denies download when no private token' do
download_file(file_name: package_file.file_name)
@@ -482,12 +466,7 @@ RSpec.describe API::MavenPackages do
shared_examples 'getting a file for a group' do
it_behaves_like 'tracking the file download event'
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully returning the file'
it 'denies download when not enough permissions' do
group.add_guest(user)
@@ -516,12 +495,7 @@ RSpec.describe API::MavenPackages do
context 'with group deploy token' do
subject { download_file_with_token(file_name: package_file.file_name, request_headers: group_deploy_token_headers) }
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully returning the file'
it 'returns the file with only write_package_registry scope' do
deploy_token_for_group.update!(read_package_registry: false)
@@ -553,12 +527,7 @@ RSpec.describe API::MavenPackages do
group.add_reporter(user)
end
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully returning the file'
context 'with a non existing maven path' do
subject { download_file_with_token(file_name: package_file.file_name, path: 'foo/bar/1.2.3', request_headers: headers_with_token, group_id: root_group.id) }
@@ -657,12 +626,7 @@ RSpec.describe API::MavenPackages do
it_behaves_like 'tracking the file download event'
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully returning the file'
it 'returns sha1 of the file' do
download_file(file_name: package_file.file_name + '.sha1')
@@ -672,6 +636,19 @@ RSpec.describe API::MavenPackages do
expect(response.body).to eq(package_file.file_sha1)
end
+ context 'when the repository is disabled' do
+ before do
+ project.project_feature.update!(
+ # Disable merge_requests and builds as well, since merge_requests and
+ # builds cannot have higher visibility than repository.
+ merge_requests_access_level: ProjectFeature::DISABLED,
+ builds_access_level: ProjectFeature::DISABLED,
+ repository_access_level: ProjectFeature::DISABLED)
+ end
+
+ it_behaves_like 'successfully returning the file'
+ end
+
context 'with a non existing maven path' do
subject { download_file(file_name: package_file.file_name, path: 'foo/bar/1.2.3') }
@@ -688,12 +665,7 @@ RSpec.describe API::MavenPackages do
it_behaves_like 'tracking the file download event'
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully returning the file'
it 'denies download when not enough permissions' do
project.add_guest(user)
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index cac1b95e854..48ded93d85f 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -409,6 +409,53 @@ RSpec.describe API::Members do
end
end
+ context 'with areas_of_focus considerations', :snowplow do
+ context 'when there is 1 user to add' do
+ let(:user_id) { stranger.id }
+
+ context 'when areas_of_focus is present in params' do
+ it 'tracks the areas_of_focus' do
+ post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
+ params: { user_id: user_id, access_level: Member::DEVELOPER, areas_of_focus: 'Other' }
+
+ expect_snowplow_event(
+ category: 'Members::CreateService',
+ action: 'area_of_focus',
+ label: 'Other',
+ property: source.members.last.id.to_s
+ )
+ end
+ end
+
+ context 'when areas_of_focus is not present in params' do
+ it 'does not track the areas_of_focus' do
+ post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
+ params: { user_id: user_id, access_level: Member::DEVELOPER }
+
+ expect_no_snowplow_event(category: 'Members::CreateService', action: 'area_of_focus')
+ end
+ end
+ end
+
+ context 'when there are multiple users to add' do
+ let(:user_id) { [developer.id, stranger.id].join(',') }
+
+ context 'when areas_of_focus is present in params' do
+ it 'tracks the areas_of_focus' do
+ post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
+ params: { user_id: user_id, access_level: Member::DEVELOPER, areas_of_focus: 'Other' }
+
+ expect_snowplow_event(
+ category: 'Members::CreateService',
+ action: 'area_of_focus',
+ label: 'Other',
+ property: source.members.last.id.to_s
+ )
+ end
+ end
+ end
+ end
+
it "returns 409 if member already exists" do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
params: { user_id: maintainer.id, access_level: Member::MAINTAINER }
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index 1ed06a40f16..222d8992d1b 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -91,6 +91,19 @@ RSpec.describe API::Namespaces do
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
end
+
+ context 'with owned_only param' do
+ it 'returns only owned groups' do
+ group1.add_developer(user)
+ group2.add_owner(user)
+
+ get api("/namespaces?owned_only=true", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.map { |resource| resource['id'] }).to match_array([user.namespace_id, group2.id])
+ end
+ end
end
end
diff --git a/spec/requests/api/npm_project_packages_spec.rb b/spec/requests/api/npm_project_packages_spec.rb
index ab74da4bda4..8c35a1642e2 100644
--- a/spec/requests/api/npm_project_packages_spec.rb
+++ b/spec/requests/api/npm_project_packages_spec.rb
@@ -78,7 +78,7 @@ RSpec.describe API::NpmProjectPackages do
context 'with a job token for a different user' do
let_it_be(:other_user) { create(:user) }
- let_it_be_with_reload(:other_job) { create(:ci_build, :running, user: other_user, project: project) }
+ let_it_be_with_reload(:other_job) { create(:ci_build, :running, user: other_user) }
let(:headers) { build_token_auth_header(other_job.token) }
@@ -161,8 +161,10 @@ RSpec.describe API::NpmProjectPackages do
end
end
- context 'valid package record' do
- let(:params) { upload_params(package_name: package_name) }
+ context 'valid package params' do
+ let_it_be(:version) { '1.2.3' }
+
+ let(:params) { upload_params(package_name: package_name, package_version: version) }
let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace, user: user } }
shared_examples 'handling upload with different authentications' do
@@ -211,6 +213,15 @@ RSpec.describe API::NpmProjectPackages do
end
end
+ shared_examples 'uploading the package' do
+ it 'uploads the package' do
+ expect { upload_package_with_token(package_name, params) }
+ .to change { project.packages.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
context 'with a scoped name' do
let(:package_name) { "@#{group.path}/my_package_name" }
@@ -228,6 +239,32 @@ RSpec.describe API::NpmProjectPackages do
it_behaves_like 'handling upload with different authentications'
end
+
+ context 'with an existing package' do
+ let_it_be(:second_project) { create(:project, namespace: namespace) }
+
+ context 'following the naming convention' do
+ let_it_be(:second_package) { create(:npm_package, project: second_project, name: "@#{group.path}/test", version: version) }
+
+ let(:package_name) { "@#{group.path}/test" }
+
+ it_behaves_like 'handling invalid record with 400 error'
+
+ context 'with a new version' do
+ let_it_be(:version) { '4.5.6' }
+
+ it_behaves_like 'uploading the package'
+ end
+ end
+
+ context 'not following the naming convention' do
+ let_it_be(:second_package) { create(:npm_package, project: second_project, name: "@any_scope/test", version: version) }
+
+ let(:package_name) { "@any_scope/test" }
+
+ it_behaves_like 'uploading the package'
+ end
+ end
end
context 'package creation fails' do
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index 8341fac3191..c5bcedd491a 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -89,6 +89,8 @@ ci_cd_settings:
- group_runners_enabled
- merge_pipelines_enabled
- merge_trains_enabled
- auto_rollback_enabled
remapped_attributes:
default_git_depth: ci_default_git_depth
@@ -119,7 +121,6 @@ project_feature:
- project_id
- requirements_access_level
- security_and_compliance_access_level
- - container_registry_access_level
- updated_at
computed_attributes:
- issues_enabled
@@ -132,7 +133,6 @@ project_feature:
project_setting:
unexposed_attributes:
- - allow_editing_commit_messages
- created_at
- has_confluence
- has_vulnerabilities
diff --git a/spec/requests/api/project_milestones_spec.rb b/spec/requests/api/project_milestones_spec.rb
index 606279ec20a..8c9a93cf9fa 100644
--- a/spec/requests/api/project_milestones_spec.rb
+++ b/spec/requests/api/project_milestones_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe API::ProjectMilestones do
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, namespace: user.namespace ) }
+ let_it_be_with_reload(:project) { create(:project, namespace: user.namespace ) }
let_it_be(:closed_milestone) { create(:closed_milestone, project: project, title: 'version1', description: 'closed milestone') }
let_it_be(:milestone) { create(:milestone, project: project, title: 'version2', description: 'open milestone') }
let_it_be(:route) { "/projects/#{project.id}/milestones" }
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index a869866c698..3622eedfed5 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -207,6 +207,18 @@ RSpec.describe API::Projects do
let(:current_user) { user }
end
+ it 'includes container_registry_access_level', :aggregate_failures do
+ project.project_feature.update!(container_registry_access_level: ProjectFeature::DISABLED)
+
+ get api('/projects', user)
+ project_response = json_response.find { |p| p['id'] == project.id }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(project_response['container_registry_access_level']).to eq('disabled')
+ expect(project_response['container_registry_enabled']).to eq(false)
+ end
+
context 'when some projects are in a group' do
before do
create(:project, :public, group: create(:group))
@@ -219,7 +231,6 @@ RSpec.describe API::Projects do
end
it 'includes correct value of container_registry_enabled', :aggregate_failures do
- project.update_column(:container_registry_enabled, true)
project.project_feature.update!(container_registry_access_level: ProjectFeature::DISABLED)
get api('/projects', user)
@@ -700,52 +711,112 @@ RSpec.describe API::Projects do
end
end
- context 'sorting by project statistics' do
- %w(repository_size storage_size wiki_size packages_size).each do |order_by|
- context "sorting by #{order_by}" do
- before do
- ProjectStatistics.update_all(order_by => 100)
- project4.statistics.update_columns(order_by => 10)
- project.statistics.update_columns(order_by => 200)
- end
+ context 'sorting' do
+ context 'by project statistics' do
+ %w(repository_size storage_size wiki_size packages_size).each do |order_by|
+ context "sorting by #{order_by}" do
+ before do
+ ProjectStatistics.update_all(order_by => 100)
+ project4.statistics.update_columns(order_by => 10)
+ project.statistics.update_columns(order_by => 200)
+ end
- context 'admin user' do
- let(:current_user) { admin }
+ context 'admin user' do
+ let(:current_user) { admin }
- context "when sorting by #{order_by} ascendingly" do
- it 'returns a properly sorted list of projects' do
- get api('/projects', current_user), params: { order_by: order_by, sort: :asc }
+ context "when sorting by #{order_by} ascendingly" do
+ it 'returns a properly sorted list of projects' do
+ get api('/projects', current_user), params: { order_by: order_by, sort: :asc }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.first['id']).to eq(project4.id)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.first['id']).to eq(project4.id)
+ end
+ end
+
+ context "when sorting by #{order_by} descendingly" do
+ it 'returns a properly sorted list of projects' do
+ get api('/projects', current_user), params: { order_by: order_by, sort: :desc }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.first['id']).to eq(project.id)
+ end
end
end
- context "when sorting by #{order_by} descendingly" do
- it 'returns a properly sorted list of projects' do
- get api('/projects', current_user), params: { order_by: order_by, sort: :desc }
+ context 'non-admin user' do
+ let(:current_user) { user }
+
+ it 'returns projects ordered normally' do
+ get api('/projects', current_user), params: { order_by: order_by }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.first['id']).to eq(project.id)
+ expect(json_response.map { |project| project['id'] }).to eq(user_projects.map(&:id).sort.reverse)
end
end
end
+ end
+ end
- context 'non-admin user' do
- let(:current_user) { user }
+ context 'by similarity', :aggregate_failures do
+ let_it_be(:group_with_projects) { create(:group) }
+ let_it_be(:project_1) { create(:project, name: 'Project', path: 'project', group: group_with_projects) }
+ let_it_be(:project_2) { create(:project, name: 'Test Project', path: 'test-project', group: group_with_projects) }
+ let_it_be(:project_3) { create(:project, name: 'Test', path: 'test', group: group_with_projects) }
+ let_it_be(:project_4) { create(:project, :public, name: 'Test Public Project') }
- it 'returns projects ordered normally' do
- get api('/projects', current_user), params: { order_by: order_by }
+ let(:current_user) { user }
+ let(:params) { { order_by: 'similarity', search: 'test' } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.map { |project| project['id'] }).to eq(user_projects.map(&:id).sort.reverse)
- end
+ subject { get api('/projects', current_user), params: params }
+
+ before do
+ group_with_projects.add_owner(current_user)
+ end
+
+ it 'returns non-public items based ordered by similarity' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(2)
+
+ project_names = json_response.map { |proj| proj['name'] }
+ expect(project_names).to contain_exactly('Test', 'Test Project')
+ end
+
+ context 'when `search` parameter is not given' do
+ let(:params) { { order_by: 'similarity' } }
+
+ it 'returns items ordered by created_at descending' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(8)
+
+ project_names = json_response.map { |proj| proj['name'] }
+ expect(project_names).to contain_exactly(project.name, project2.name, 'second_project', 'public_project', 'Project', 'Test Project', 'Test Public Project', 'Test')
+ end
+ end
+
+ context 'when called anonymously' do
+ let(:current_user) { nil }
+
+ it 'returns items ordered by created_at descending' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(1)
+
+ project_names = json_response.map { |proj| proj['name'] }
+ expect(project_names).to contain_exactly('Test Public Project')
end
end
end
@@ -982,7 +1053,7 @@ RSpec.describe API::Projects do
expect(response).to have_gitlab_http_status(:bad_request)
end
- it "assigns attributes to project" do
+ it "assigns attributes to project", :aggregate_failures do
project = attributes_for(:project, {
path: 'camelCasePath',
issues_enabled: false,
@@ -1004,6 +1075,7 @@ RSpec.describe API::Projects do
}).tap do |attrs|
attrs[:operations_access_level] = 'disabled'
attrs[:analytics_access_level] = 'disabled'
+ attrs[:container_registry_access_level] = 'private'
end
post api('/projects', user), params: project
@@ -1011,7 +1083,10 @@ RSpec.describe API::Projects do
expect(response).to have_gitlab_http_status(:created)
project.each_pair do |k, v|
- next if %i[has_external_issue_tracker has_external_wiki issues_enabled merge_requests_enabled wiki_enabled storage_version].include?(k)
+ next if %i[
+ has_external_issue_tracker has_external_wiki issues_enabled merge_requests_enabled wiki_enabled storage_version
+ container_registry_access_level
+ ].include?(k)
expect(json_response[k.to_s]).to eq(v)
end
@@ -1023,6 +1098,28 @@ RSpec.describe API::Projects do
expect(project.project_feature.wiki_access_level).to eq(ProjectFeature::DISABLED)
expect(project.operations_access_level).to eq(ProjectFeature::DISABLED)
expect(project.project_feature.analytics_access_level).to eq(ProjectFeature::DISABLED)
+ expect(project.project_feature.container_registry_access_level).to eq(ProjectFeature::PRIVATE)
+ end
+
+ it 'assigns container_registry_enabled to project', :aggregate_failures do
+ project = attributes_for(:project, { container_registry_enabled: true })
+
+ post api('/projects', user), params: project
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['container_registry_enabled']).to eq(true)
+ expect(json_response['container_registry_access_level']).to eq('enabled')
+ expect(Project.find_by(path: project[:path]).container_registry_access_level).to eq(ProjectFeature::ENABLED)
+ end
end
it 'creates a project using a template' do
@@ -1280,6 +1377,14 @@ RSpec.describe API::Projects do
expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id)
end
+ it 'includes container_registry_access_level', :aggregate_failures do
+ get api("/users/#{user4.id}/projects/", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.first.keys).to include('container_registry_access_level')
+ end
+
context 'and using id_after' do
let!(:another_public_project) { create(:project, :public, name: 'another_public_project', creator_id: user4.id, namespace: user4.namespace) }
@@ -1464,6 +1569,18 @@ RSpec.describe API::Projects do
expect(json_response['error']).to eq('name is missing')
end
+ it 'sets container_registry_enabled' do
+ project = attributes_for(:project).tap do |attrs|
+ attrs[:container_registry_enabled] = true
+ end
+
+ post api("/projects/user/#{user.id}", admin), params: project
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['container_registry_enabled']).to eq(true)
+ expect(Project.find_by(path: project[:path]).container_registry_access_level).to eq(ProjectFeature::ENABLED)
+ end
+
it 'assigns attributes to project' do
project = attributes_for(:project, {
issues_enabled: false,
@@ -1589,6 +1706,59 @@ RSpec.describe API::Projects do
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_truthy
end
+
+ context 'container_registry_enabled' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:container_registry_enabled, :container_registry_access_level) do
+ true | ProjectFeature::ENABLED
+ false | ProjectFeature::DISABLED
+ end
+
+ with_them do
+ it 'setting container_registry_enabled also sets container_registry_access_level', :aggregate_failures do
+ project_attributes = attributes_for(:project).tap do |attrs|
+ attrs[:container_registry_enabled] = container_registry_enabled
+ end
+
+ post api("/projects/user/#{user.id}", admin), params: project_attributes
+
+ project = Project.find_by(path: project_attributes[:path])
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['container_registry_access_level']).to eq(ProjectFeature.str_from_access_level(container_registry_access_level))
+ expect(json_response['container_registry_enabled']).to eq(container_registry_enabled)
+ expect(project.container_registry_access_level).to eq(container_registry_access_level)
+ expect(project.container_registry_enabled).to eq(container_registry_enabled)
+ end
+ end
+ end
+
+ context 'container_registry_access_level' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:container_registry_access_level, :container_registry_enabled) do
+ 'enabled' | true
+ 'private' | true
+ 'disabled' | false
+ end
+
+ with_them do
+ it 'setting container_registry_access_level also sets container_registry_enabled', :aggregate_failures do
+ project_attributes = attributes_for(:project).tap do |attrs|
+ attrs[:container_registry_access_level] = container_registry_access_level
+ end
+
+ post api("/projects/user/#{user.id}", admin), params: project_attributes
+
+ project = Project.find_by(path: project_attributes[:path])
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['container_registry_access_level']).to eq(container_registry_access_level)
+ expect(json_response['container_registry_enabled']).to eq(container_registry_enabled)
+ expect(project.container_registry_access_level).to eq(ProjectFeature.access_level_from_str(container_registry_access_level))
+ expect(project.container_registry_enabled).to eq(container_registry_enabled)
+ end
+ end
+ end
end
describe "POST /projects/:id/uploads/authorize" do
@@ -1974,6 +2144,7 @@ RSpec.describe API::Projects do
expect(json_response['jobs_enabled']).to be_present
expect(json_response['snippets_enabled']).to be_present
expect(json_response['container_registry_enabled']).to be_present
+ expect(json_response['container_registry_access_level']).to be_present
expect(json_response['created_at']).to be_present
expect(json_response['last_activity_at']).to be_present
expect(json_response['shared_runners_enabled']).to be_present
@@ -2065,6 +2236,7 @@ RSpec.describe API::Projects do
expect(json_response['resolve_outdated_diff_discussions']).to eq(project.resolve_outdated_diff_discussions)
expect(json_response['remove_source_branch_after_merge']).to be_truthy
expect(json_response['container_registry_enabled']).to be_present
+ expect(json_response['container_registry_access_level']).to be_present
expect(json_response['created_at']).to be_present
expect(json_response['last_activity_at']).to be_present
expect(json_response['shared_runners_enabled']).to be_present
@@ -2865,6 +3037,59 @@ RSpec.describe API::Projects do
end
end
+ describe 'POST /projects/:id/import_project_members/:project_id' do
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:project2_user) { create(:user) }
+
+ before_all do
+ project.add_maintainer(user)
+ project2.add_maintainer(user)
+ project2.add_developer(project2_user)
+ end
+
+ it 'returns 200 when it successfully imports members from another project' do
+ expect do
+ post api("/projects/#{project.id}/import_project_members/#{project2.id}", user)
+ end.to change { project.members.count }.by(2)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['message']).to eq('Successfully imported')
+ end
+
+ it 'returns 404 if the source project does not exist' do
+ expect do
+ post api("/projects/#{project.id}/import_project_members/#{non_existing_record_id}", user)
+ end.not_to change { project.members.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Project Not Found')
+ end
+
+ it 'returns 404 if the target project members cannot be administered by the requester' do
+ private_project = create(:project, :private)
+
+ expect do
+ post api("/projects/#{private_project.id}/import_project_members/#{project2.id}", user)
+ end.not_to change { project.members.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Project Not Found')
+ end
+
+ it 'returns 422 if the import failed for valid projects' do
+ allow_next_instance_of(::ProjectTeam) do |project_team|
+ allow(project_team).to receive(:import).and_return(false)
+ end
+
+ expect do
+ post api("/projects/#{project.id}/import_project_members/#{project2.id}", user)
+ end.not_to change { project.members.count }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq('Import failed')
+ end
+ end
+
describe 'PUT /projects/:id' do
before do
expect(project).to be_persisted
@@ -2891,6 +3116,24 @@ RSpec.describe API::Projects do
end
end
+ it 'sets container_registry_access_level', :aggregate_failures do
+ put api("/projects/#{project.id}", user), params: { container_registry_access_level: 'private' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['container_registry_access_level']).to eq('private')
+ expect(Project.find_by(path: project[:path]).container_registry_access_level).to eq(ProjectFeature::PRIVATE)
+ end
+
+ it 'sets container_registry_enabled' do
+ project.project_feature.update!(container_registry_access_level: ProjectFeature::DISABLED)
+
+ put(api("/projects/#{project.id}", user), params: { container_registry_enabled: true })
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['container_registry_enabled']).to eq(true)
+ expect(project.reload.container_registry_access_level).to eq(ProjectFeature::ENABLED)
+ end
+
it 'returns 400 when nothing sent' do
project_param = {}
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index e66326db2a2..8df2460a2b6 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe API::PypiPackages do
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
- let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
+ let_it_be(:job) { create(:ci_build, :running, user: user) }
let(:headers) { {} }
@@ -23,7 +23,8 @@ RSpec.describe API::PypiPackages do
subject { get api(url), headers: headers }
describe 'GET /api/v4/groups/:id/-/packages/pypi/simple/:package_name' do
- let(:url) { "/groups/#{group.id}/-/packages/pypi/simple/#{package.name}" }
+ let(:package_name) { package.name }
+ let(:url) { "/groups/#{group.id}/-/packages/pypi/simple/#{package_name}" }
let(:snowplow_gitlab_standard_context) { {} }
it_behaves_like 'pypi simple API endpoint'
@@ -38,6 +39,12 @@ RSpec.describe API::PypiPackages do
end
it_behaves_like 'deploy token for package GET requests'
+
+ context 'with group path as id' do
+ let(:url) { "/groups/#{CGI.escape(group.full_path)}/-/packages/pypi/simple/#{package_name}"}
+
+ it_behaves_like 'deploy token for package GET requests'
+ end
end
context 'job token' do
@@ -54,13 +61,20 @@ RSpec.describe API::PypiPackages do
end
describe 'GET /api/v4/projects/:id/packages/pypi/simple/:package_name' do
- let(:url) { "/projects/#{project.id}/packages/pypi/simple/#{package.name}" }
+ let(:package_name) { package.name }
+ let(:url) { "/projects/#{project.id}/packages/pypi/simple/#{package_name}" }
let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace } }
it_behaves_like 'pypi simple API endpoint'
it_behaves_like 'rejects PyPI access with unknown project id'
it_behaves_like 'deploy token for package GET requests'
it_behaves_like 'job token for package GET requests'
+
+ context 'with project path as id' do
+ let(:url) { "/projects/#{CGI.escape(project.full_path)}/packages/pypi/simple/#{package_name}" }
+
+ it_behaves_like 'deploy token for package GET requests'
+ end
end
end
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 03e0954e5ab..87b08587904 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -811,7 +811,7 @@ RSpec.describe API::Releases do
end
context 'when using JOB-TOKEN auth' do
- let(:job) { create(:ci_build, user: maintainer, project: project) }
+ let(:job) { create(:ci_build, user: maintainer) }
let(:params) do
{
name: 'Another release',
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index d019e89e0b4..d3262b8056b 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -107,13 +107,18 @@ RSpec.describe API::Repositories do
shared_examples_for 'repository blob' do
it 'returns blob attributes as json' do
+ stub_const("Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE", 5)
+
get api(route, current_user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['size']).to eq(111)
expect(json_response['encoding']).to eq("base64")
- expect(Base64.decode64(json_response['content']).lines.first).to eq("class Commit\n")
expect(json_response['sha']).to eq(sample_blob.oid)
+
+ content = Base64.decode64(json_response['content'])
+ expect(content.lines.first).to eq("class Commit\n")
+ expect(content).to eq(project.repository.gitaly_blob_client.get_blob(oid: sample_blob.oid, limit: -1).data)
end
context 'when sha does not exist' do
@@ -164,7 +169,10 @@ RSpec.describe API::Repositories do
shared_examples_for 'repository raw blob' do
it 'returns the repository raw blob' do
- expect(Gitlab::Workhorse).to receive(:send_git_blob)
+ expect(Gitlab::Workhorse).to receive(:send_git_blob) do |_, blob|
+ expect(blob.id).to eq(sample_blob.oid)
+ expect(blob.loaded_size).to eq(0)
+ end
get api(route, current_user)
diff --git a/spec/requests/api/rubygem_packages_spec.rb b/spec/requests/api/rubygem_packages_spec.rb
index 9b104520b52..afa7adad80c 100644
--- a/spec/requests/api/rubygem_packages_spec.rb
+++ b/spec/requests/api/rubygem_packages_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe API::RubygemPackages do
let_it_be_with_reload(:project) { create(:project) }
let_it_be(:personal_access_token) { create(:personal_access_token) }
let_it_be(:user) { personal_access_token.user }
- let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
+ let_it_be(:job) { create(:ci_build, :running, user: user) }
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
let_it_be(:headers) { {} }
diff --git a/spec/requests/api/statistics_spec.rb b/spec/requests/api/statistics_spec.rb
index eab97b6916e..baffb2792e9 100644
--- a/spec/requests/api/statistics_spec.rb
+++ b/spec/requests/api/statistics_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe API::Statistics, 'Statistics' do
# Make sure the reltuples have been updated
# to get a correct count on postgresql
tables_to_analyze.each do |table|
- ActiveRecord::Base.connection.execute("ANALYZE #{table}")
+ ApplicationRecord.connection.execute("ANALYZE #{table}")
end
get api(path, admin)
diff --git a/spec/requests/api/terraform/modules/v1/packages_spec.rb b/spec/requests/api/terraform/modules/v1/packages_spec.rb
index b04f5ad9a94..6803c09b8c2 100644
--- a/spec/requests/api/terraform/modules/v1/packages_spec.rb
+++ b/spec/requests/api/terraform/modules/v1/packages_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe API::Terraform::Modules::V1::Packages do
let_it_be(:package) { create(:terraform_module_package, project: project) }
let_it_be(:personal_access_token) { create(:personal_access_token) }
let_it_be(:user) { personal_access_token.user }
- let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
+ let_it_be(:job) { create(:ci_build, :running, user: user) }
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
diff --git a/spec/requests/api/user_counts_spec.rb b/spec/requests/api/user_counts_spec.rb
index 94e25d647fc..ab2aa87d1b7 100644
--- a/spec/requests/api/user_counts_spec.rb
+++ b/spec/requests/api/user_counts_spec.rb
@@ -3,8 +3,10 @@
require 'spec_helper'
RSpec.describe API::UserCounts do
- let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
+ let_it_be(:todo) { create(:todo, :pending, user: user, project: project) }
let!(:merge_request) { create(:merge_request, :simple, author: user, assignees: [user], source_project: project, title: "Test") }
@@ -18,22 +20,36 @@ RSpec.describe API::UserCounts do
end
context 'when authenticated' do
- it 'returns open counts for current user' do
+ it 'returns assigned issue counts for current_user' do
get api('/user_counts', user)
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_a Hash
- expect(json_response['merge_requests']).to eq(1)
+ expect(json_response['assigned_issues']).to eq(1)
end
- it 'updates the mr count when a new mr is assigned' do
- create(:merge_request, source_project: project, author: user, assignees: [user])
+ context 'merge requests' do
+ it 'returns assigned MR counts for current user' do
+ get api('/user_counts', user)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_a Hash
+ expect(json_response['merge_requests']).to eq(1)
+ end
+
+ it 'updates the mr count when a new mr is assigned' do
+ create(:merge_request, source_project: project, author: user, assignees: [user])
+
+ get api('/user_counts', user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_a Hash
+ expect(json_response['merge_requests']).to eq(2)
+ end
+ end
+
+ it 'returns pending todo counts for current_user' do
get api('/user_counts', user)
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_a Hash
- expect(json_response['merge_requests']).to eq(2)
+ expect(json_response['todos']).to eq(1)
end
end
end
diff --git a/spec/requests/api/v3/github_spec.rb b/spec/requests/api/v3/github_spec.rb
index 4100b246218..255f53e4c7c 100644
--- a/spec/requests/api/v3/github_spec.rb
+++ b/spec/requests/api/v3/github_spec.rb
@@ -472,6 +472,17 @@ RSpec.describe API::V3::Github do
expect(response).to have_gitlab_http_status(:ok)
end
+
+ context 'when the project has no repository', :aggregate_failures do
+ let_it_be(:project) { create(:project, creator: user) }
+
+ it 'returns an empty collection response' do
+ jira_get v3_api("/repos/#{project.namespace.path}/#{project.path}/branches", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_empty
+ end
+ end
end
context 'unauthenticated' do
diff --git a/spec/requests/customers_dot/proxy_controller_spec.rb b/spec/requests/customers_dot/proxy_controller_spec.rb
deleted file mode 100644
index 4938c67e0c3..00000000000
--- a/spec/requests/customers_dot/proxy_controller_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe CustomersDot::ProxyController, type: :request do
- describe 'POST graphql' do
- let_it_be(:customers_dot) { "#{Gitlab::SubscriptionPortal::SUBSCRIPTIONS_URL}/graphql" }
-
- it 'forwards request body to customers dot' do
- request_params = '{ "foo" => "bar" }'
-
- stub_request(:post, customers_dot)
-
- post customers_dot_proxy_graphql_path, params: request_params
-
- expect(WebMock).to have_requested(:post, customers_dot).with(body: request_params)
- end
-
- it 'responds with customers dot status' do
- stub_request(:post, customers_dot).to_return(status: 500)
-
- post customers_dot_proxy_graphql_path
-
- expect(response).to have_gitlab_http_status(:internal_server_error)
- end
-
- it 'responds with customers dot response body' do
- customers_dot_response = 'foo'
-
- stub_request(:post, customers_dot).to_return(body: customers_dot_response)
-
- post customers_dot_proxy_graphql_path
-
- expect(response.body).to eq(customers_dot_response)
- end
- end
-end
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 34f8a479719..e4a0c034b20 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -267,7 +267,7 @@ RSpec.describe 'Git HTTP requests' do
it "responds to pulls with the wiki's repo" do
download(path) do |response|
- json_body = ActiveSupport::JSON.decode(response.body)
+ json_body = Gitlab::Json.parse(response.body)
expect(json_body['Repository']['relative_path']).to eq(wiki.repository.relative_path)
end
@@ -1610,7 +1610,7 @@ RSpec.describe 'Git HTTP requests' do
it "responds to pulls with the wiki's repo" do
download(path) do |response|
- json_body = ActiveSupport::JSON.decode(response.body)
+ json_body = Gitlab::Json.parse(response.body)
expect(json_body['Repository']['relative_path']).to eq(wiki.repository.relative_path)
end
diff --git a/spec/requests/jwt_controller_spec.rb b/spec/requests/jwt_controller_spec.rb
index 55577a5dc65..70097234762 100644
--- a/spec/requests/jwt_controller_spec.rb
+++ b/spec/requests/jwt_controller_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe JwtController do
it 'authenticates correctly' do
expect(response).to have_gitlab_http_status(:ok)
- expect(service_class).to have_received(:new).with(nil, deploy_token, ActionController::Parameters.new(parameters).permit!)
+ expect(service_class).to have_received(:new).with(nil, nil, ActionController::Parameters.new(parameters.merge(deploy_token: deploy_token)).permit!)
end
it 'does not log a user' do
@@ -224,8 +224,10 @@ RSpec.describe JwtController do
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :private, group: group) }
- let_it_be(:group_deploy_token) { create(:deploy_token, :group, groups: [group]) }
- let_it_be(:project_deploy_token) { create(:deploy_token, :project, projects: [project]) }
+ let_it_be(:group_deploy_token) { create(:deploy_token, :group, :dependency_proxy_scopes) }
+ let_it_be(:gdeploy_token) { create(:group_deploy_token, deploy_token: group_deploy_token, group: group) }
+ let_it_be(:project_deploy_token) { create(:deploy_token, :project, :dependency_proxy_scopes) }
+ let_it_be(:pdeploy_token) { create(:project_deploy_token, deploy_token: project_deploy_token, project: project) }
let_it_be(:service_name) { 'dependency_proxy' }
let(:headers) { { authorization: credentials(credential_user, credential_password) } }
@@ -264,7 +266,7 @@ RSpec.describe JwtController do
let(:credential_user) { group_deploy_token.username }
let(:credential_password) { group_deploy_token.token }
- it_behaves_like 'returning response status', :forbidden
+ it_behaves_like 'with valid credentials'
end
context 'with project deploy token' do
@@ -274,6 +276,28 @@ RSpec.describe JwtController do
it_behaves_like 'returning response status', :forbidden
end
+ context 'with revoked group deploy token' do
+ let(:credential_user) { group_deploy_token.username }
+ let(:credential_password) { project_deploy_token.token }
+
+ before do
+ group_deploy_token.update_column(:revoked, true)
+ end
+
+ it_behaves_like 'returning response status', :unauthorized
+ end
+
+ context 'with group deploy token with insufficient scopes' do
+ let(:credential_user) { group_deploy_token.username }
+ let(:credential_password) { project_deploy_token.token }
+
+ before do
+ group_deploy_token.update_column(:write_registry, false)
+ end
+
+ it_behaves_like 'returning response status', :unauthorized
+ end
+
context 'with invalid credentials' do
let(:credential_user) { 'foo' }
let(:credential_password) { 'bar' }
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index 7921fdcb0de..89d46b64311 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -8,6 +8,9 @@ RSpec.describe 'value stream analytics events' do
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
describe 'GET /:namespace/:project/value_stream_analytics/events/issues' do
+ let(:first_issue_iid) { project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s }
+ let(:first_mr_iid) { project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s }
+
before do
project.add_developer(user)
@@ -25,8 +28,6 @@ RSpec.describe 'value stream analytics events' do
it 'lists the issue events' do
get project_cycle_analytics_issue_path(project, format: :json)
- first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
-
expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['iid']).to eq(first_issue_iid)
end
@@ -34,8 +35,6 @@ RSpec.describe 'value stream analytics events' do
it 'lists the plan events' do
get project_cycle_analytics_plan_path(project, format: :json)
- first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
-
expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['iid']).to eq(first_issue_iid)
end
@@ -45,8 +44,6 @@ RSpec.describe 'value stream analytics events' do
expect(json_response['events']).not_to be_empty
- first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
-
expect(json_response['events'].first['iid']).to eq(first_mr_iid)
end
@@ -54,15 +51,15 @@ RSpec.describe 'value stream analytics events' do
get project_cycle_analytics_test_path(project, format: :json)
expect(json_response['events']).not_to be_empty
- expect(json_response['events'].first['date']).not_to be_empty
+
+ expect(json_response['events'].first['iid']).to eq(first_mr_iid)
end
it 'lists the review events' do
get project_cycle_analytics_review_path(project, format: :json)
- first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
-
expect(json_response['events']).not_to be_empty
+
expect(json_response['events'].first['iid']).to eq(first_mr_iid)
end
@@ -70,7 +67,8 @@ RSpec.describe 'value stream analytics events' do
get project_cycle_analytics_staging_path(project, format: :json)
expect(json_response['events']).not_to be_empty
- expect(json_response['events'].first['date']).not_to be_empty
+
+ expect(json_response['events'].first['iid']).to eq(first_issue_iid)
end
context 'with private project and builds' do
diff --git a/spec/requests/projects/merge_requests/diffs_spec.rb b/spec/requests/projects/merge_requests/diffs_spec.rb
index 3a64c88acc1..349cbf1b76c 100644
--- a/spec/requests/projects/merge_requests/diffs_spec.rb
+++ b/spec/requests/projects/merge_requests/diffs_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe 'Merge Requests Diffs' do
merge_request: merge_request,
diff_view: :inline,
merge_ref_head_diff: nil,
+ allow_tree_conflicts: true,
pagination_data: {
total_pages: nil
}.merge(pagination_data)
@@ -75,6 +76,78 @@ RSpec.describe 'Merge Requests Diffs' do
subject
end
+ context 'with the different user' do
+ let(:another_user) { create(:user) }
+
+ before do
+ project.add_maintainer(another_user)
+ sign_in(another_user)
+ end
+
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+ end
+ end
+
+ context 'with a new unfoldable diff position' do
+ let(:unfoldable_position) do
+ create(:diff_position)
+ end
+
+ before do
+ expect_next_instance_of(Gitlab::Diff::PositionCollection) do |instance|
+ expect(instance)
+ .to receive(:unfoldable)
+ .and_return([unfoldable_position])
+ end
+ end
+
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+ end
+ end
+
+ context 'with a new environment' do
+ let(:environment) do
+ create(:environment, :available, project: project)
+ end
+
+ let!(:deployment) do
+ create(:deployment, :success, environment: environment, ref: merge_request.source_branch)
+ end
+
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20).merge(environment: environment) }
+ end
+ end
+
+ context 'with disabled display_merge_conflicts_in_diff feature' do
+ before do
+ stub_feature_flags(display_merge_conflicts_in_diff: false)
+ end
+
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20).merge(allow_tree_conflicts: false) }
+ end
+ end
+
+ context 'with diff_head option' do
+ subject { go(page: 0, per_page: 5, diff_head: true) }
+
+ before do
+ merge_request.create_merge_head_diff!
+ end
+
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20).merge(merge_ref_head_diff: true) }
+ end
+ end
+
context 'with the different pagination option' do
subject { go(page: 5, per_page: 5) }
diff --git a/spec/requests/projects/merge_requests_discussions_spec.rb b/spec/requests/projects/merge_requests_discussions_spec.rb
index 595222a9eb2..c68745b9271 100644
--- a/spec/requests/projects/merge_requests_discussions_spec.rb
+++ b/spec/requests/projects/merge_requests_discussions_spec.rb
@@ -54,7 +54,9 @@ RSpec.describe 'merge requests discussions' do
end
context 'caching', :use_clean_rails_memory_store_caching do
- let!(:first_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project) }
+ let(:reference) { create(:issue, project: project) }
+ let(:author) { create(:user) }
+ let!(:first_note) { create(:diff_note_on_merge_request, author: author, noteable: merge_request, project: project, note: "reference: #{reference.to_reference}") }
let!(:second_note) { create(:diff_note_on_merge_request, in_reply_to: first_note, noteable: merge_request, project: project) }
let!(:award_emoji) { create(:award_emoji, awardable: first_note) }
@@ -93,6 +95,16 @@ RSpec.describe 'merge requests discussions' do
end
end
+ context 'when a note in a discussion got its reference state updated' do
+ before do
+ reference.close!
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+
context 'when a note in a discussion got resolved' do
before do
travel_to(1.minute.from_now) do
@@ -147,17 +159,6 @@ RSpec.describe 'merge requests discussions' do
end
end
- context 'when cached markdown version gets bump' do
- before do
- settings = Gitlab::CurrentSettings.current_application_settings
- settings.update!(local_markdown_version: settings.local_markdown_version + 1)
- end
-
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
- end
- end
-
context 'when the diff note position changes' do
before do
# This replicates a position change wherein timestamps aren't updated
@@ -181,6 +182,53 @@ RSpec.describe 'merge requests discussions' do
end
end
+ context 'when the HEAD diff note position changes' do
+ before do
+ # This replicates a DiffNotePosition change. This is the same approach
+ # being used in Discussions::CaptureDiffNotePositionService which is
+ # responsible for updating/creating DiffNotePosition of a diff discussions
+ # in relation to HEAD diff.
+ new_position = Gitlab::Diff::Position.new(
+ old_path: first_note.position.old_path,
+ new_path: first_note.position.new_path,
+ old_line: first_note.position.old_line,
+ new_line: first_note.position.new_line + 1,
+ diff_refs: first_note.position.diff_refs
+ )
+
+ DiffNotePosition.create_or_update_for(
+ first_note,
+ diff_type: :head,
+ position: new_position,
+ line_code: 'bd4b7bfff3a247ccf6e3371c41ec018a55230bcc_534_521'
+ )
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+
+ context 'when author detail changes' do
+ before do
+ author.update!(name: "#{author.name} (Updated)")
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+
+ context 'when author status changes' do
+ before do
+ Users::SetStatusService.new(author, message: "updated status").execute
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+
context 'when merge_request_discussion_cache is disabled' do
before do
stub_feature_flags(merge_request_discussion_cache: false)
diff --git a/spec/requests/projects/merge_requests_spec.rb b/spec/requests/projects/merge_requests_spec.rb
new file mode 100644
index 00000000000..59fde803560
--- /dev/null
+++ b/spec/requests/projects/merge_requests_spec.rb
@@ -0,0 +1,159 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'merge requests actions' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:merge_request) do
+ create(:merge_request_with_diffs, target_project: project,
+ source_project: project,
+ assignees: [user],
+ reviewers: [user2])
+ end
+
+ let(:user) { project.owner }
+ let(:user2) { create(:user) }
+
+ before do
+ project.add_maintainer(user2)
+ sign_in(user)
+ end
+
+ describe 'GET /:namespace/:project/-/merge_requests/:iid' do
+ describe 'as json' do
+ def send_request(extra_params = {})
+ params = {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid,
+ format: :json
+ }
+
+ get namespace_project_merge_request_path(params.merge(extra_params))
+ end
+
+ context 'with caching', :use_clean_rails_memory_store_caching do
+ let(:params) { {} }
+
+ context 'for sidebar_extras' do
+ let(:params) { { serializer: 'sidebar_extras' } }
+
+ shared_examples_for 'a non-cached request' do
+ it 'serializes merge request' do
+ expect_next_instance_of(MergeRequestSerializer) do |instance|
+ expect(instance).to receive(:represent)
+ .with(an_instance_of(MergeRequest), serializer: 'sidebar_extras')
+ .and_call_original
+ end
+
+ send_request(params)
+ end
+ end
+
+ context 'when the request has not been cached' do
+ it_behaves_like 'a non-cached request'
+ end
+
+ context 'when the request has already been cached' do
+ before do
+ send_request(params)
+ end
+
+ it 'does not serialize merge request again' do
+ expect_next_instance_of(MergeRequestSerializer) do |instance|
+ expect(instance).not_to receive(:represent)
+ end
+
+ send_request(params)
+ end
+
+ context 'when the merge request is updated' do
+ def update_service(params)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: params).execute(merge_request)
+ end
+
+ context 'when the logged in user is different' do
+ before do
+ sign_in(user2)
+ end
+
+ it_behaves_like 'a non-cached request'
+ end
+
+ context 'when the assignee is changed' do
+ before do
+ update_service( assignee_ids: [] )
+ end
+
+ it_behaves_like 'a non-cached request'
+ end
+
+ context 'when the existing assignee gets updated' do
+ before do
+ user.update_attribute(:avatar, 'uploads/avatar.png')
+ end
+
+ it_behaves_like 'a non-cached request'
+ end
+
+ context 'when the reviewer is changed' do
+ before do
+ update_service(reviewer_ids: [])
+ end
+
+ it_behaves_like 'a non-cached request'
+ end
+
+ context 'when the existing reviewer gets updated' do
+ before do
+ user2.update_attribute(:avatar, 'uploads/avatar.png')
+ end
+
+ it_behaves_like 'a non-cached request'
+ end
+
+ context 'when the time_estimate is changed' do
+ before do
+ update_service(time_estimate: 7200)
+ end
+
+ it_behaves_like 'a non-cached request'
+ end
+
+ context 'when the spend_time is changed' do
+ before do
+ update_service(spend_time: { duration: 7200, user_id: user.id, spent_at: Time.now, note_id: nil })
+ end
+
+ it_behaves_like 'a non-cached request'
+ end
+
+ context 'when a user leaves a note' do
+ before do
+ # We have 1 minute ThrottledTouch to account for.
+ # It's not ideal as it means that our participants cache could be stale for about a day if a new note is created by another person or gets a mention.
+ travel_to(Time.current + 61) do
+ Notes::CreateService.new(project, user2, { note: 'Looks good', noteable_type: 'MergeRequest', noteable_id: merge_request.id }).execute
+ end
+ end
+
+ it_behaves_like 'a non-cached request'
+ end
+ end
+ end
+ end
+
+ context 'for other serializer' do
+ let(:params) { { serializer: 'basic' } }
+
+ it 'does not use cache' do
+ expect(Rails.cache).not_to receive(:fetch).with(/cache:gitlab:MergeRequestSerializer:/).and_call_original
+
+ send_request(params)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/robots_txt_spec.rb b/spec/requests/robots_txt_spec.rb
index a8be4093a71..f6c9b018c68 100644
--- a/spec/requests/robots_txt_spec.rb
+++ b/spec/requests/robots_txt_spec.rb
@@ -28,6 +28,7 @@ RSpec.describe 'Robots.txt Requests', :aggregate_failures do
it 'blocks the requests' do
requests = [
+ Gitlab::Experiment::Configuration.mount_at,
'/autocomplete/users',
'/autocomplete/projects',
'/search',
diff --git a/spec/rubocop/cop/gitlab/bulk_insert_spec.rb b/spec/rubocop/cop/gitlab/bulk_insert_spec.rb
index 7c60518f890..bbc8f381d01 100644
--- a/spec/rubocop/cop/gitlab/bulk_insert_spec.rb
+++ b/spec/rubocop/cop/gitlab/bulk_insert_spec.rb
@@ -6,17 +6,17 @@ require_relative '../../../../rubocop/cop/gitlab/bulk_insert'
RSpec.describe RuboCop::Cop::Gitlab::BulkInsert do
subject(:cop) { described_class.new }
- it 'flags the use of Gitlab::Database.bulk_insert' do
+ it 'flags the use of Gitlab::Database.main.bulk_insert' do
expect_offense(<<~SOURCE)
- Gitlab::Database.bulk_insert('merge_request_diff_files', rows)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use the `BulkInsertSafe` concern, [...]
+ Gitlab::Database.main.bulk_insert('merge_request_diff_files', rows)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use the `BulkInsertSafe` concern, [...]
SOURCE
end
- it 'flags the use of ::Gitlab::Database.bulk_insert' do
+ it 'flags the use of ::Gitlab::Database.main.bulk_insert' do
expect_offense(<<~SOURCE)
- ::Gitlab::Database.bulk_insert('merge_request_diff_files', rows)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use the `BulkInsertSafe` concern, [...]
+ ::Gitlab::Database.main.bulk_insert('merge_request_diff_files', rows)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use the `BulkInsertSafe` concern, [...]
SOURCE
end
end
diff --git a/spec/rubocop/cop/gitlab/json_spec.rb b/spec/rubocop/cop/gitlab/json_spec.rb
index 66b2c675e80..7998f26da4e 100644
--- a/spec/rubocop/cop/gitlab/json_spec.rb
+++ b/spec/rubocop/cop/gitlab/json_spec.rb
@@ -6,7 +6,7 @@ require_relative '../../../../rubocop/cop/gitlab/json'
RSpec.describe RuboCop::Cop::Gitlab::Json do
subject(:cop) { described_class.new }
- context 'when JSON is called' do
+ context 'when ::JSON is called' do
it 'registers an offense' do
expect_offense(<<~RUBY)
class Foo
@@ -18,4 +18,17 @@ RSpec.describe RuboCop::Cop::Gitlab::Json do
RUBY
end
end
+
+ context 'when ActiveSupport::JSON is called' do
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ class Foo
+ def bar
+ ActiveSupport::JSON.parse('{ "foo": "bar" }')
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid calling `JSON` directly. [...]
+ end
+ end
+ RUBY
+ end
+ end
end
diff --git a/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb b/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
index 968cafc57d4..35b21477d80 100644
--- a/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
+++ b/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
@@ -194,6 +194,10 @@ RSpec.describe RuboCop::Cop::Gitlab::MarkUsedFeatureFlags do
include_examples 'sets flag as used', 'self.limit_feature_flag = :foo', 'foo'
end
+ describe 'self.limit_feature_flag_for_override = :foo' do
+ include_examples 'sets flag as used', 'self.limit_feature_flag_for_override = :foo', 'foo'
+ end
+
describe 'FEATURE_FLAG = :foo' do
include_examples 'sets flag as used', 'FEATURE_FLAG = :foo', 'foo'
end
@@ -218,7 +222,7 @@ RSpec.describe RuboCop::Cop::Gitlab::MarkUsedFeatureFlags do
include_examples 'does not set any flags as used', 'field :solution'
include_examples 'does not set any flags as used', 'field :runners, Types::Ci::RunnerType.connection_type'
include_examples 'does not set any flags as used', 'field :runners, Types::Ci::RunnerType.connection_type, null: true, description: "hello world"'
- include_examples 'does not set any flags as used', 'field :solution, type: GraphQL::STRING_TYPE, null: true, description: "URL to the vulnerabilitys details page."'
+ include_examples 'does not set any flags as used', 'field :solution, type: GraphQL::Types::String, null: true, description: "URL to the vulnerabilitys details page."'
end
describe "tracking of usage data metrics known events happens at the beginning of inspection" do
diff --git a/spec/rubocop/cop/graphql/descriptions_spec.rb b/spec/rubocop/cop/graphql/descriptions_spec.rb
index 9709a253bdc..84520a89b08 100644
--- a/spec/rubocop/cop/graphql/descriptions_spec.rb
+++ b/spec/rubocop/cop/graphql/descriptions_spec.rb
@@ -12,8 +12,8 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeType < BaseObject
field :a_thing,
- ^^^^^^^^^^^^^^^ Please add a `description` property.
- GraphQL::STRING_TYPE,
+ ^^^^^^^^^^^^^^^ #{described_class::MSG_NO_DESCRIPTION}
+ GraphQL::Types::String,
null: false
end
end
@@ -25,10 +25,38 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeType < BaseObject
field :a_thing,
- ^^^^^^^^^^^^^^^ `description` strings must end with a `.`.
- GraphQL::STRING_TYPE,
+ ^^^^^^^^^^^^^^^ #{described_class::MSG_NO_PERIOD}
+ GraphQL::Types::String,
null: false,
- description: 'A descriptive description'
+ description: 'Description of a thing'
+ end
+ end
+ TYPE
+ end
+
+ it 'adds an offense when description begins with "A"' do
+ expect_offense(<<~TYPE)
+ module Types
+ class FakeType < BaseObject
+ field :a_thing,
+ ^^^^^^^^^^^^^^^ #{described_class::MSG_BAD_START}
+ GraphQL::Types::String,
+ null: false,
+ description: 'A description of the thing.'
+ end
+ end
+ TYPE
+ end
+
+ it 'adds an offense when description begins with "The"' do
+ expect_offense(<<~TYPE)
+ module Types
+ class FakeType < BaseObject
+ field :a_thing,
+ ^^^^^^^^^^^^^^^ #{described_class::MSG_BAD_START}
+ GraphQL::Types::String,
+ null: false,
+ description: 'The description of the thing.'
end
end
TYPE
@@ -39,9 +67,9 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeType < BaseObject
field :a_thing,
- GraphQL::STRING_TYPE,
+ GraphQL::Types::String,
null: false,
- description: 'A descriptive description.'
+ description: 'Description of a thing.'
end
end
TYPE
@@ -64,8 +92,8 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeType < BaseObject
argument :a_thing,
- ^^^^^^^^^^^^^^^^^^ Please add a `description` property.
- GraphQL::STRING_TYPE,
+ ^^^^^^^^^^^^^^^^^^ #{described_class::MSG_NO_DESCRIPTION}
+ GraphQL::Types::String,
null: false
end
end
@@ -77,8 +105,8 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeType < BaseObject
argument :a_thing,
- ^^^^^^^^^^^^^^^^^^ `description` strings must end with a `.`.
- GraphQL::STRING_TYPE,
+ ^^^^^^^^^^^^^^^^^^ #{described_class::MSG_NO_PERIOD}
+ GraphQL::Types::String,
null: false,
description: 'Behold! A description'
end
@@ -86,12 +114,40 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
TYPE
end
+ it 'adds an offense when description begins with "A"' do
+ expect_offense(<<~TYPE)
+ module Types
+ class FakeType < BaseObject
+ argument :a_thing,
+ ^^^^^^^^^^^^^^^^^^ #{described_class::MSG_BAD_START}
+ GraphQL::Types::String,
+ null: false,
+ description: 'A description.'
+ end
+ end
+ TYPE
+ end
+
+ it 'adds an offense when description begins with "The"' do
+ expect_offense(<<~TYPE)
+ module Types
+ class FakeType < BaseObject
+ argument :a_thing,
+ ^^^^^^^^^^^^^^^^^^ #{described_class::MSG_BAD_START}
+ GraphQL::Types::String,
+ null: false,
+ description: 'The description.'
+ end
+ end
+ TYPE
+ end
+
it 'does not add an offense when description is correct' do
expect_no_offenses(<<~TYPE.strip)
module Types
class FakeType < BaseObject
argument :a_thing,
- GraphQL::STRING_TYPE,
+ GraphQL::Types::String,
null: false,
description: 'Behold! A description.'
end
@@ -106,7 +162,7 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeEnum < BaseEnum
value 'FOO', value: 'foo'
- ^^^^^^^^^^^^^^^^^^^^^^^^^ Please add a `description` property.
+ ^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG_NO_DESCRIPTION}
end
end
TYPE
@@ -117,7 +173,29 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeEnum < BaseEnum
value 'FOO', value: 'foo', description: 'bar'
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `description` strings must end with a `.`.
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG_NO_PERIOD}
+ end
+ end
+ TYPE
+ end
+
+ it 'adds an offense when description begins with "The"' do
+ expect_offense(<<~TYPE.strip)
+ module Types
+ class FakeEnum < BaseEnum
+ value 'FOO', value: 'foo', description: 'The description.'
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG_BAD_START}
+ end
+ end
+ TYPE
+ end
+
+ it 'adds an offense when description begins with "A"' do
+ expect_offense(<<~TYPE.strip)
+ module Types
+ class FakeEnum < BaseEnum
+ value 'FOO', value: 'foo', description: 'A description.'
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG_BAD_START}
end
end
TYPE
@@ -150,8 +228,8 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeType < BaseObject
field :a_thing,
- ^^^^^^^^^^^^^^^ `description` strings must end with a `.`.
- GraphQL::STRING_TYPE,
+ ^^^^^^^^^^^^^^^ #{described_class::MSG_NO_PERIOD}
+ GraphQL::Types::String,
null: false,
description: 'Behold! A description'
end
@@ -162,7 +240,7 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeType < BaseObject
field :a_thing,
- GraphQL::STRING_TYPE,
+ GraphQL::Types::String,
null: false,
description: 'Behold! A description.'
end
@@ -175,8 +253,8 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeType < BaseObject
field :a_thing,
- ^^^^^^^^^^^^^^^ `description` strings must end with a `.`.
- GraphQL::STRING_TYPE,
+ ^^^^^^^^^^^^^^^ #{described_class::MSG_NO_PERIOD}
+ GraphQL::Types::String,
null: false,
description: <<~DESC
Behold! A description
@@ -189,7 +267,7 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
module Types
class FakeType < BaseObject
field :a_thing,
- GraphQL::STRING_TYPE,
+ GraphQL::Types::String,
null: false,
description: <<~DESC
Behold! A description.
diff --git a/spec/rubocop/cop/graphql/id_type_spec.rb b/spec/rubocop/cop/graphql/id_type_spec.rb
index a566488b118..d71031c6e1a 100644
--- a/spec/rubocop/cop/graphql/id_type_spec.rb
+++ b/spec/rubocop/cop/graphql/id_type_spec.rb
@@ -7,10 +7,10 @@ require_relative '../../../../rubocop/cop/graphql/id_type'
RSpec.describe RuboCop::Cop::Graphql::IDType do
subject(:cop) { described_class.new }
- it 'adds an offense when GraphQL::ID_TYPE is used as a param to #argument' do
+ it 'adds an offense when GraphQL::Types::ID is used as a param to #argument' do
expect_offense(<<~TYPE)
- argument :some_arg, GraphQL::ID_TYPE, some: other, params: do_not_matter
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Do not use GraphQL::ID_TYPE, use a specific GlobalIDType instead
+ argument :some_arg, GraphQL::Types::ID, some: other, params: do_not_matter
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Do not use GraphQL::Types::ID, use a specific GlobalIDType instead
TYPE
end
@@ -18,13 +18,13 @@ RSpec.describe RuboCop::Cop::Graphql::IDType do
RuboCop::Cop::Graphql::IDType::WHITELISTED_ARGUMENTS.each do |arg|
it "does not add an offense for calls to #argument with #{arg} as argument name" do
expect_no_offenses(<<~TYPE.strip)
- argument #{arg}, GraphQL::ID_TYPE, some: other, params: do_not_matter
+ argument #{arg}, GraphQL::Types::ID, some: other, params: do_not_matter
TYPE
end
end
end
- it 'does not add an offense for calls to #argument without GraphQL::ID_TYPE' do
+ it 'does not add an offense for calls to #argument without GraphQL::Types::ID' do
expect_no_offenses(<<~TYPE.strip)
argument :some_arg, ::Types::GlobalIDType[::Awardable], some: other, params: do_not_matter
TYPE
diff --git a/spec/rubocop/cop/graphql/json_type_spec.rb b/spec/rubocop/cop/graphql/json_type_spec.rb
index 50437953c1d..882e2b2ef88 100644
--- a/spec/rubocop/cop/graphql/json_type_spec.rb
+++ b/spec/rubocop/cop/graphql/json_type_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe RuboCop::Cop::Graphql::JSONType do
it 'does not add an offense for other types' do
expect_no_offenses(<<~RUBY.strip)
class MyType
- field :some_field, GraphQL::STRING_TYPE
+ field :some_field, GraphQL::Types::String
end
RUBY
end
@@ -60,7 +60,7 @@ RSpec.describe RuboCop::Cop::Graphql::JSONType do
it 'does not add an offense for other types' do
expect_no_offenses(<<~RUBY.strip)
class MyType
- argument :some_arg, GraphQL::STRING_TYPE
+ argument :some_arg, GraphQL::Types::String
end
RUBY
end
diff --git a/spec/rubocop/cop/graphql/old_types_spec.rb b/spec/rubocop/cop/graphql/old_types_spec.rb
new file mode 100644
index 00000000000..396bf4ce997
--- /dev/null
+++ b/spec/rubocop/cop/graphql/old_types_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+require_relative '../../../../rubocop/cop/graphql/old_types'
+
+RSpec.describe RuboCop::Cop::Graphql::OldTypes do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:cop) { described_class.new }
+
+ where(:old_type, :message) do
+ 'GraphQL::ID_TYPE' | 'Avoid using GraphQL::ID_TYPE. Use GraphQL::Types::ID instead'
+ 'GraphQL::INT_TYPE' | 'Avoid using GraphQL::INT_TYPE. Use GraphQL::Types::Int instead'
+ 'GraphQL::STRING_TYPE' | 'Avoid using GraphQL::STRING_TYPE. Use GraphQL::Types::String instead'
+ 'GraphQL::BOOLEAN_TYPE' | 'Avoid using GraphQL::BOOLEAN_TYPE. Use GraphQL::Types::Boolean instead'
+ end
+
+ with_them do
+ context 'fields' do
+ it 'adds an offense when an old type is used' do
+ expect_offense(<<~RUBY)
+ class MyType
+ field :some_field, #{old_type}
+ ^^^^^^^^^^^^^^^^^^^#{'^' * old_type.length} #{message}
+ end
+ RUBY
+ end
+
+ it "adds an offense when an old type is used with other keywords" do
+ expect_offense(<<~RUBY)
+ class MyType
+ field :some_field, #{old_type}, null: true, description: 'My description'
+ ^^^^^^^^^^^^^^^^^^^#{'^' * old_type.length}^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{message}
+ end
+ RUBY
+ end
+ end
+
+ context 'arguments' do
+ it 'adds an offense when an old type is used' do
+ expect_offense(<<~RUBY)
+ class MyType
+ field :some_arg, #{old_type}
+ ^^^^^^^^^^^^^^^^^#{'^' * old_type.length} #{message}
+ end
+ RUBY
+ end
+
+ it 'adds an offense when an old type is used with other keywords' do
+ expect_offense(<<~RUBY)
+ class MyType
+ argument :some_arg, #{old_type}, null: true, description: 'My description'
+ ^^^^^^^^^^^^^^^^^^^^#{'^' * old_type.length}^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{message}
+ end
+ RUBY
+ end
+ end
+ end
+
+ it 'does not add an offense for other types in fields' do
+ expect_no_offenses(<<~RUBY.strip)
+ class MyType
+ field :some_field, GraphQL::Types::JSON
+ end
+ RUBY
+ end
+
+ it 'does not add an offense for other types in arguments' do
+ expect_no_offenses(<<~RUBY.strip)
+ class MyType
+ argument :some_arg, GraphQL::Types::JSON
+ end
+ RUBY
+ end
+
+ it 'does not add an offense for uses outside of field or argument' do
+ expect_no_offenses(<<~RUBY.strip)
+ class MyType
+ foo :some_field, GraphQL::ID_TYPE
+ end
+ RUBY
+ end
+end
diff --git a/spec/rubocop/cop/ignored_columns_spec.rb b/spec/rubocop/cop/ignored_columns_spec.rb
index 1c72fedbf31..f87b1a1e520 100644
--- a/spec/rubocop/cop/ignored_columns_spec.rb
+++ b/spec/rubocop/cop/ignored_columns_spec.rb
@@ -14,4 +14,82 @@ RSpec.describe RuboCop::Cop::IgnoredColumns do
end
RUBY
end
+
+ context 'when only CE model exist' do
+ let(:file_path) { full_path('app/models/bar.rb') }
+
+ it 'does not flag ignore_columns usage in CE model' do
+ expect_no_offenses(<<~RUBY, file_path)
+ class Bar < ApplicationRecord
+ ignore_columns :foo, remove_with: '14.3', remove_after: '2021-09-22'
+ end
+ RUBY
+ end
+
+ it 'flags ignore_column usage in EE model' do
+ expect_no_offenses(<<~RUBY, file_path)
+ class Baz < ApplicationRecord
+ ignore_column :bar, remove_with: '14.3', remove_after: '2021-09-22'
+ end
+ RUBY
+ end
+ end
+
+ context 'when only EE model exist' do
+ let(:file_path) { full_path('ee/app/models/ee/bar.rb') }
+
+ before do
+ allow(File).to receive(:exist?).with(full_path('app/models/bar.rb')).and_return(false)
+ end
+
+ it 'flags ignore_columns usage in EE model' do
+ expect_no_offenses(<<~RUBY, file_path)
+ class Bar < ApplicationRecord
+ ignore_columns :foo, remove_with: '14.3', remove_after: '2021-09-22'
+ end
+ RUBY
+ end
+
+ it 'flags ignore_column usage in EE model' do
+ expect_no_offenses(<<~RUBY, file_path)
+ class Bar < ApplicationRecord
+ ignore_column :foo, remove_with: '14.3', remove_after: '2021-09-22'
+ end
+ RUBY
+ end
+ end
+
+ context 'when CE and EE model exist' do
+ let(:file_path) { full_path('ee/app/models/ee/bar.rb') }
+
+ before do
+ allow(File).to receive(:exist?).with(full_path('app/models/bar.rb')).and_return(true)
+ end
+
+ it 'flags ignore_columns usage in EE model' do
+ expect_offense(<<~RUBY, file_path)
+ class Bar < ApplicationRecord
+ ignore_columns :foo, remove_with: '14.3', remove_after: '2021-09-22'
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ If the model exists in CE and EE, [...]
+ end
+ RUBY
+ end
+
+ it 'flags ignore_column usage in EE model' do
+ expect_offense(<<~RUBY, file_path)
+ class Bar < ApplicationRecord
+ ignore_column :foo, remove_with: '14.3', remove_after: '2021-09-22'
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ If the model exists in CE and EE, [...]
+ end
+ RUBY
+ end
+ end
+
+ private
+
+ def full_path(path)
+ rails_root = '../../../'
+
+ File.expand_path(File.join(rails_root, path), __dir__)
+ end
end
diff --git a/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb b/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb
index 7bcaf36b014..6a8df2b507d 100644
--- a/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb
+++ b/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb
@@ -116,7 +116,6 @@ RSpec.describe RuboCop::Cop::Migration::CreateTableWithForeignKeys do
shared_context 'when there is a target to a high traffic table' do |dsl_method|
%w[
audit_events
- ci_build_trace_sections
ci_builds
ci_builds_metadata
ci_job_artifacts
diff --git a/spec/rubocop/cop/qa/selector_usage_spec.rb b/spec/rubocop/cop/qa/selector_usage_spec.rb
new file mode 100644
index 00000000000..b40c57f8991
--- /dev/null
+++ b/spec/rubocop/cop/qa/selector_usage_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require_relative '../../../../rubocop/cop/qa/selector_usage'
+
+RSpec.describe RuboCop::Cop::QA::SelectorUsage do
+ subject(:cop) { described_class.new }
+
+ shared_examples 'non-qa file usage' do
+ it 'reports an offense' do
+ expect_offense(<<-RUBY)
+ find('#{selector}').click
+ #{'^' * (selector.size + 2)} Do not use `#{selector}` as this is reserved for the end-to-end specs. Use a different selector or a data-testid instead.
+ RUBY
+ end
+ end
+
+ context 'in a QA file' do
+ before do
+ allow(cop).to receive(:in_qa_file?).and_return(true)
+ end
+
+ it 'has no error' do
+ expect_no_offenses(<<-RUBY)
+ has_element?('[data-qa-selector="my_selector"]')
+ RUBY
+ end
+ end
+
+ context 'outside of QA' do
+ before do
+ allow(cop).to receive(:in_qa_file?).and_return(false)
+ allow(cop).to receive(:in_spec?).and_return(true)
+ end
+
+ context 'data-qa-selector' do
+ let(:selector) { '[data-qa-selector="my_selector"]' }
+
+ it_behaves_like 'non-qa file usage'
+ end
+
+ context 'qa class' do
+ let(:selector) { '.qa-selector' }
+
+ it_behaves_like 'non-qa file usage'
+ end
+ end
+end
diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb
index 4a58f341658..8a63715ed86 100644
--- a/spec/serializers/build_details_entity_spec.rb
+++ b/spec/serializers/build_details_entity_spec.rb
@@ -133,6 +133,7 @@ RSpec.describe BuildDetailsEntity do
let(:message) { subject[:callout_message] }
before do
+ build.pipeline.unlocked!
build.drop!(:missing_dependency_failure)
end
diff --git a/spec/serializers/ci/pipeline_entity_spec.rb b/spec/serializers/ci/pipeline_entity_spec.rb
index 054406e4e65..f79bbd91a0a 100644
--- a/spec/serializers/ci/pipeline_entity_spec.rb
+++ b/spec/serializers/ci/pipeline_entity_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Ci::PipelineEntity do
let(:pipeline) { create(:ci_empty_pipeline) }
it 'contains required fields' do
- expect(subject).to include :id, :user, :path, :coverage, :source
+ expect(subject).to include :id, :iid, :user, :path, :coverage, :source
expect(subject).to include :ref, :commit
expect(subject).to include :updated_at, :created_at
end
diff --git a/spec/serializers/diff_file_entity_spec.rb b/spec/serializers/diff_file_entity_spec.rb
index c15c9324f94..ebfb21c4311 100644
--- a/spec/serializers/diff_file_entity_spec.rb
+++ b/spec/serializers/diff_file_entity_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe DiffFileEntity do
describe '#is_fully_expanded' do
context 'file with a conflict' do
- let(:options) { { conflicts: { diff_file.new_path => double(diff_lines_for_serializer: []) } } }
+ let(:options) { { conflicts: { diff_file.new_path => double(diff_lines_for_serializer: [], conflict_type: :both_modified) } } }
it 'returns false' do
expect(diff_file).not_to receive(:fully_expanded?)
@@ -90,4 +90,6 @@ RSpec.describe DiffFileEntity do
end
end
end
+
+ it_behaves_like 'diff file with conflict_type'
end
diff --git a/spec/serializers/diff_file_metadata_entity_spec.rb b/spec/serializers/diff_file_metadata_entity_spec.rb
index 3ce1ea49677..6f76d812177 100644
--- a/spec/serializers/diff_file_metadata_entity_spec.rb
+++ b/spec/serializers/diff_file_metadata_entity_spec.rb
@@ -4,8 +4,9 @@ require 'spec_helper'
RSpec.describe DiffFileMetadataEntity do
let(:merge_request) { create(:merge_request_with_diffs) }
- let(:raw_diff_file) { merge_request.merge_request_diff.diffs.raw_diff_files.first }
- let(:entity) { described_class.new(raw_diff_file) }
+ let(:diff_file) { merge_request.merge_request_diff.diffs.raw_diff_files.first }
+ let(:options) { {} }
+ let(:entity) { described_class.new(diff_file, options) }
context 'as json' do
subject { entity.as_json }
@@ -20,8 +21,11 @@ RSpec.describe DiffFileMetadataEntity do
:deleted_file,
:submodule,
:file_identifier_hash,
- :file_hash
+ :file_hash,
+ :conflict_type
)
end
+
+ it_behaves_like 'diff file with conflict_type'
end
end
diff --git a/spec/serializers/diffs_entity_spec.rb b/spec/serializers/diffs_entity_spec.rb
index a7446f14745..aef7d3732f8 100644
--- a/spec/serializers/diffs_entity_spec.rb
+++ b/spec/serializers/diffs_entity_spec.rb
@@ -9,8 +9,14 @@ RSpec.describe DiffsEntity do
let(:request) { EntityRequest.new(project: project, current_user: user) }
let(:merge_request_diffs) { merge_request.merge_request_diffs }
+ let(:allow_tree_conflicts) { false }
let(:options) do
- { request: request, merge_request: merge_request, merge_request_diffs: merge_request_diffs }
+ {
+ request: request,
+ merge_request: merge_request,
+ merge_request_diffs: merge_request_diffs,
+ allow_tree_conflicts: allow_tree_conflicts
+ }
end
let(:entity) do
@@ -87,7 +93,7 @@ RSpec.describe DiffsEntity do
let(:diff_file_without_conflict) { diff_files.to_a[-2] }
let(:resolvable_conflicts) { true }
- let(:conflict_file) { double(our_path: diff_file_with_conflict.new_path) }
+ let(:conflict_file) { double(path: diff_file_with_conflict.new_path, conflict_type: :both_modified) }
let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: resolvable_conflicts) }
let(:merge_ref_head_diff) { true }
@@ -123,6 +129,18 @@ RSpec.describe DiffsEntity do
subject
end
+
+ context 'when allow_tree_conflicts is set to true' do
+ let(:allow_tree_conflicts) { true }
+
+ it 'conflicts are still highlighted' do
+ expect(conflict_file).to receive(:diff_lines_for_serializer)
+ expect(diff_file_with_conflict).not_to receive(:diff_lines_for_serializer)
+ expect(diff_file_without_conflict).to receive(:diff_lines_for_serializer).twice # for highlighted_diff_lines and is_fully_expanded
+
+ subject
+ end
+ end
end
end
end
diff --git a/spec/serializers/diffs_metadata_entity_spec.rb b/spec/serializers/diffs_metadata_entity_spec.rb
index b1cbe7e216e..3311b434ce5 100644
--- a/spec/serializers/diffs_metadata_entity_spec.rb
+++ b/spec/serializers/diffs_metadata_entity_spec.rb
@@ -9,12 +9,17 @@ RSpec.describe DiffsMetadataEntity do
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
let(:merge_request_diffs) { merge_request.merge_request_diffs }
let(:merge_request_diff) { merge_request_diffs.last }
+ let(:options) { {} }
let(:entity) do
- described_class.new(merge_request_diff.diffs,
- request: request,
- merge_request: merge_request,
- merge_request_diffs: merge_request_diffs)
+ described_class.new(
+ merge_request_diff.diffs,
+ options.merge(
+ request: request,
+ merge_request: merge_request,
+ merge_request_diffs: merge_request_diffs
+ )
+ )
end
context 'as json' do
@@ -38,20 +43,61 @@ RSpec.describe DiffsMetadataEntity do
end
describe 'diff_files' do
- it 'returns diff files metadata' do
- raw_diff_files = merge_request_diff.diffs.raw_diff_files
+ let!(:raw_diff_files) { merge_request_diff.diffs.raw_diff_files }
+ before do
expect_next_instance_of(Gitlab::Diff::FileCollection::MergeRequestDiff) do |instance|
# Use lightweight version instead. Several methods delegate to it, so putting a 5
# calls limit.
expect(instance).to receive(:raw_diff_files).at_most(5).times.and_call_original
expect(instance).not_to receive(:diff_files)
end
+ end
+ it 'returns diff files metadata' do
payload = DiffFileMetadataEntity.represent(raw_diff_files).as_json
expect(subject[:diff_files]).to eq(payload)
end
+
+ context 'when merge_ref_head_diff and allow_tree_conflicts options are set' do
+ let(:conflict_file) { double(path: raw_diff_files.first.new_path, conflict_type: :both_modified) }
+ let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: false) }
+
+ before do
+ allow(MergeRequests::Conflicts::ListService).to receive(:new).and_return(conflicts)
+ end
+
+ context 'when merge_ref_head_diff is true and allow_tree_conflicts is false' do
+ let(:options) { { merge_ref_head_diff: true, allow_tree_conflicts: false } }
+
+ it 'returns diff files metadata without conflicts' do
+ payload = DiffFileMetadataEntity.represent(raw_diff_files).as_json
+
+ expect(subject[:diff_files]).to eq(payload)
+ end
+ end
+
+ context 'when merge_ref_head_diff is false and allow_tree_conflicts is true' do
+ let(:options) { { merge_ref_head_diff: false, allow_tree_conflicts: true } }
+
+ it 'returns diff files metadata without conflicts' do
+ payload = DiffFileMetadataEntity.represent(raw_diff_files).as_json
+
+ expect(subject[:diff_files]).to eq(payload)
+ end
+ end
+
+ context 'when merge_ref_head_diff and allow_tree_conflicts are true' do
+ let(:options) { { merge_ref_head_diff: true, allow_tree_conflicts: true } }
+
+ it 'returns diff files metadata with conflicts' do
+ payload = DiffFileMetadataEntity.represent(raw_diff_files, conflicts: { conflict_file.path => conflict_file }).as_json
+
+ expect(subject[:diff_files]).to eq(payload)
+ end
+ end
+ end
end
end
end
diff --git a/spec/serializers/integrations/project_entity_spec.rb b/spec/serializers/integrations/project_entity_spec.rb
new file mode 100644
index 00000000000..1564f7fad63
--- /dev/null
+++ b/spec/serializers/integrations/project_entity_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::ProjectEntity do
+ let_it_be(:project) { create(:project, :with_avatar) }
+
+ let(:entity) do
+ described_class.new(project)
+ end
+
+ context 'as json' do
+ include Gitlab::Routing.url_helpers
+
+ subject { entity.as_json }
+
+ it 'contains needed attributes' do
+ expect(subject).to include(
+ avatar_url: include('uploads'),
+ name: project.name,
+ full_path: project_path(project),
+ full_name: project.full_name
+ )
+ end
+ end
+end
diff --git a/spec/serializers/integrations/project_serializer_spec.rb b/spec/serializers/integrations/project_serializer_spec.rb
new file mode 100644
index 00000000000..053548075bb
--- /dev/null
+++ b/spec/serializers/integrations/project_serializer_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::ProjectSerializer do
+ it 'represents Integrations::ProjectEntity entities' do
+ expect(described_class.entity_class).to eq(Integrations::ProjectEntity)
+ end
+end
diff --git a/spec/serializers/jira_connect/app_data_serializer_spec.rb b/spec/serializers/jira_connect/app_data_serializer_spec.rb
new file mode 100644
index 00000000000..9c10a8a54a1
--- /dev/null
+++ b/spec/serializers/jira_connect/app_data_serializer_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::AppDataSerializer do
+ describe '#as_json' do
+ subject(:app_data_json) { described_class.new(subscriptions, signed_in).as_json }
+
+ let_it_be(:subscriptions) { create_list(:jira_connect_subscription, 2) }
+
+ let(:signed_in) { false }
+
+ it 'uses the subscription entity' do
+ expect(JiraConnect::SubscriptionEntity).to receive(:represent).with(subscriptions)
+
+ app_data_json
+ end
+
+ it 'includes a group path with already subscribed namespaces as skip_groups' do
+ expected_path = "/api/v4/groups?min_access_level=40&skip_groups%5B%5D=#{subscriptions.first.namespace_id}&skip_groups%5B%5D=#{subscriptions.last.namespace_id}"
+
+ expect(app_data_json).to include(groups_path: expected_path)
+ end
+
+ it { is_expected.to include(subscriptions_path: '/-/jira_connect/subscriptions') }
+ it { is_expected.to include(login_path: '/-/jira_connect/users') }
+
+ context 'when signed in' do
+ let(:signed_in) { true }
+
+ it { is_expected.to include(login_path: nil) }
+ end
+ end
+end
diff --git a/spec/serializers/jira_connect/group_entity_spec.rb b/spec/serializers/jira_connect/group_entity_spec.rb
new file mode 100644
index 00000000000..ade5ae89c52
--- /dev/null
+++ b/spec/serializers/jira_connect/group_entity_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::GroupEntity do
+ subject do
+ described_class.new(subscription.namespace).as_json
+ end
+
+ let(:subscription) { create(:jira_connect_subscription) }
+
+ it 'contains all necessary elements of the group', :aggregate_failures do
+ expect(subject[:name]).to eq(subscription.namespace.name)
+ expect(subject[:avatar_url]).to eq(subscription.namespace.avatar_url)
+ expect(subject[:full_name]).to eq(subscription.namespace.full_name)
+ expect(subject[:description]).to eq(subscription.namespace.description)
+ end
+end
diff --git a/spec/serializers/jira_connect/subscription_entity_spec.rb b/spec/serializers/jira_connect/subscription_entity_spec.rb
new file mode 100644
index 00000000000..5d5d6d585f2
--- /dev/null
+++ b/spec/serializers/jira_connect/subscription_entity_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::SubscriptionEntity do
+ subject do
+ described_class.new(subscription).as_json
+ end
+
+ let(:subscription) { create(:jira_connect_subscription) }
+
+ it 'contains all necessary elements of the subscription', :aggregate_failures do
+ expect(subject).to include(:created_at)
+ expect(subject[:unlink_path]).to eq("/-/jira_connect/subscriptions/#{subscription.id}")
+ expect(subject[:group]).to eq(
+ name: subscription.namespace.name,
+ avatar_url: subscription.namespace.avatar_url,
+ full_name: subscription.namespace.full_name,
+ description: subscription.namespace.description
+ )
+ end
+end
diff --git a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
index 5845a868e53..5f4b734fcea 100644
--- a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
@@ -302,16 +302,6 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
expect(subject[:merge_pipeline]).to be_nil
end
end
-
- context 'when merge_request_cached_merge_pipeline_serializer is disabled' do
- before do
- stub_feature_flags(merge_request_cached_merge_pipeline_serializer: false)
- end
-
- it 'returns nil' do
- expect(subject[:merge_pipeline]).to be_nil
- end
- end
end
end
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 9a0e25516cb..3aebe16438c 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -22,54 +22,6 @@ RSpec.describe MergeRequestPollWidgetEntity do
.to eq(resource.default_merge_commit_message(include_description: true))
end
- describe 'merge_pipeline' do
- before do
- stub_feature_flags(merge_request_cached_merge_pipeline_serializer: false)
- end
-
- it 'returns nil' do
- expect(subject[:merge_pipeline]).to be_nil
- end
-
- context 'when is merged' do
- let_it_be(:resource) { create(:merged_merge_request, source_project: project, merge_commit_sha: project.commit.id) }
- let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.target_branch, sha: resource.merge_commit_sha) }
-
- before do
- project.add_maintainer(user)
- end
-
- context 'when user cannot read pipelines on target project' do
- before do
- project.team.truncate
- end
-
- it 'returns nil' do
- expect(subject[:merge_pipeline]).to be_nil
- end
- end
-
- it 'returns merge_pipeline' do
- pipeline_payload =
- MergeRequests::PipelineEntity
- .represent(pipeline, request: request)
- .as_json
-
- expect(subject[:merge_pipeline]).to eq(pipeline_payload)
- end
-
- context 'when merge_request_cached_merge_pipeline_serializer is enabled' do
- before do
- stub_feature_flags(merge_request_cached_merge_pipeline_serializer: true)
- end
-
- it 'returns nil' do
- expect(subject[:merge_pipeline]).to be_nil
- end
- end
- end
- end
-
describe 'new_blob_path' do
context 'when user can push to project' do
it 'returns path' do
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index 926b33e8e1f..35846b0d4ea 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe MergeRequestWidgetEntity do
data = described_class.new(resource, request: request, issues_links: true).as_json
expect(data).to include(:issues_links)
- expect(data[:issues_links]).to include(:assign_to_closing, :closing, :mentioned_but_not_closing)
+ expect(data[:issues_links]).to include(:assign_to_closing, :closing, :mentioned_but_not_closing, :closing_count, :mentioned_count)
end
it 'omits issue links by default' do
@@ -354,4 +354,45 @@ RSpec.describe MergeRequestWidgetEntity do
end
end
end
+
+ describe 'when gitpod is disabled' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:gitpod_enabled).and_return(false)
+ end
+
+ it 'exposes gitpod attributes' do
+ expect(subject).to include(
+ show_gitpod_button: false,
+ gitpod_url: nil,
+ gitpod_enabled: false
+ )
+ end
+ end
+
+ describe 'when gitpod is enabled' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:gitpod_enabled).and_return(true)
+ allow(Gitlab::CurrentSettings).to receive(:gitpod_url).and_return("https://gitpod.example.com")
+ end
+
+ it 'exposes gitpod attributes' do
+ mr_url = Gitlab::Routing.url_helpers.project_merge_request_url(resource.project, resource)
+
+ expect(subject).to include(
+ show_gitpod_button: true,
+ gitpod_url: "https://gitpod.example.com##{mr_url}",
+ gitpod_enabled: false
+ )
+ end
+
+ describe 'when gitpod is enabled for user' do
+ before do
+ allow(user).to receive(:gitpod_enabled).and_return(true)
+ end
+
+ it 'exposes gitpod_enabled as true' do
+ expect(subject[:gitpod_enabled]).to be(true)
+ end
+ end
+ end
end
diff --git a/spec/serializers/paginated_diff_entity_spec.rb b/spec/serializers/paginated_diff_entity_spec.rb
index f408deb734e..db8bf92cbf5 100644
--- a/spec/serializers/paginated_diff_entity_spec.rb
+++ b/spec/serializers/paginated_diff_entity_spec.rb
@@ -7,11 +7,13 @@ RSpec.describe PaginatedDiffEntity do
let(:request) { double('request', current_user: user) }
let(:merge_request) { create(:merge_request) }
let(:diff_batch) { merge_request.merge_request_diff.diffs_in_batch(2, 3, diff_options: nil) }
+ let(:allow_tree_conflicts) { false }
let(:options) do
{
request: request,
merge_request: merge_request,
- pagination_data: diff_batch.pagination_data
+ pagination_data: diff_batch.pagination_data,
+ allow_tree_conflicts: allow_tree_conflicts
}
end
@@ -34,7 +36,7 @@ RSpec.describe PaginatedDiffEntity do
let(:diff_file_without_conflict) { diff_files.first }
let(:resolvable_conflicts) { true }
- let(:conflict_file) { double(our_path: diff_file_with_conflict.new_path) }
+ let(:conflict_file) { double(path: diff_file_with_conflict.new_path, conflict_type: :both_modified) }
let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: resolvable_conflicts) }
let(:merge_ref_head_diff) { true }
@@ -70,6 +72,18 @@ RSpec.describe PaginatedDiffEntity do
subject
end
+
+ context 'when allow_tree_conflicts is set to true' do
+ let(:allow_tree_conflicts) { true }
+
+ it 'conflicts are still highlighted' do
+ expect(conflict_file).to receive(:diff_lines_for_serializer)
+ expect(diff_file_with_conflict).not_to receive(:diff_lines_for_serializer)
+ expect(diff_file_without_conflict).to receive(:diff_lines_for_serializer).twice # for highlighted_diff_lines and is_fully_expanded
+
+ subject
+ end
+ end
end
end
end
diff --git a/spec/services/admin/propagate_service_template_spec.rb b/spec/services/admin/propagate_service_template_spec.rb
deleted file mode 100644
index c8ca3173f99..00000000000
--- a/spec/services/admin/propagate_service_template_spec.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Admin::PropagateServiceTemplate do
- describe '.propagate' do
- let_it_be(:project) { create(:project) }
-
- let!(:service_template) do
- Integrations::Pushover.create!(
- template: true,
- active: true,
- push_events: false,
- properties: {
- device: 'MyDevice',
- sound: 'mic',
- priority: 4,
- user_key: 'asdf',
- api_key: '123456789'
- }
- )
- end
-
- it 'calls to PropagateIntegrationProjectWorker' do
- expect(PropagateIntegrationProjectWorker).to receive(:perform_async)
- .with(service_template.id, project.id, project.id)
-
- described_class.propagate(service_template)
- end
-
- context 'with a project that has another service' do
- before do
- Integrations::Bamboo.create!(
- active: true,
- project: project,
- properties: {
- bamboo_url: 'http://gitlab.com',
- username: 'mic',
- password: 'password',
- build_key: 'build'
- }
- )
- end
-
- it 'calls to PropagateIntegrationProjectWorker' do
- expect(PropagateIntegrationProjectWorker).to receive(:perform_async)
- .with(service_template.id, project.id, project.id)
-
- described_class.propagate(service_template)
- end
- end
-
- it 'does not create the service if it exists already' do
- Integration.build_from_integration(service_template, project_id: project.id).save!
-
- expect { described_class.propagate(service_template) }
- .not_to change { Integration.count }
- end
- end
-end
diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb
index 4124696ac08..b456f7a2745 100644
--- a/spec/services/auth/container_registry_authentication_service_spec.rb
+++ b/spec/services/auth/container_registry_authentication_service_spec.rb
@@ -17,11 +17,6 @@ RSpec.describe Auth::ContainerRegistryAuthenticationService do
project.add_developer(current_user)
end
- shared_examples 'an unmodified token' do
- it_behaves_like 'a valid token'
- it { expect(payload['access']).not_to include(have_key('migration_eligible')) }
- end
-
shared_examples 'a modified token with migration eligibility' do |eligible|
it_behaves_like 'a valid token'
it { expect(payload['access']).to include(include('migration_eligible' => eligible)) }
@@ -71,7 +66,7 @@ RSpec.describe Auth::ContainerRegistryAuthenticationService do
{ scopes: ["repository:#{project.full_path}:pull"] }
end
- it_behaves_like 'an unmodified token'
+ it_behaves_like 'a modified token'
end
context 'with push action' do
@@ -82,20 +77,12 @@ RSpec.describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'a modified token'
end
- context 'with multiple actions including push' do
+ context 'with multiple actions' do
let(:current_params) do
{ scopes: ["repository:#{project.full_path}:pull,push,delete"] }
end
it_behaves_like 'a modified token'
end
-
- context 'with multiple actions excluding push' do
- let(:current_params) do
- { scopes: ["repository:#{project.full_path}:pull,delete"] }
- end
-
- it_behaves_like 'an unmodified token'
- end
end
end
diff --git a/spec/services/auth/dependency_proxy_authentication_service_spec.rb b/spec/services/auth/dependency_proxy_authentication_service_spec.rb
index 35e6d59b456..667f361dc34 100644
--- a/spec/services/auth/dependency_proxy_authentication_service_spec.rb
+++ b/spec/services/auth/dependency_proxy_authentication_service_spec.rb
@@ -21,6 +21,12 @@ RSpec.describe Auth::DependencyProxyAuthenticationService do
end
end
+ shared_examples 'returning a token' do
+ it 'returns a token' do
+ expect(subject[:token]).not_to be_nil
+ end
+ end
+
context 'dependency proxy is not enabled' do
before do
stub_config(dependency_proxy: { enabled: false })
@@ -36,15 +42,13 @@ RSpec.describe Auth::DependencyProxyAuthenticationService do
end
context 'with a deploy token as user' do
- let_it_be(:user) { create(:deploy_token) }
+ let_it_be(:user) { create(:deploy_token, :group, :dependency_proxy_scopes) }
- it_behaves_like 'returning', status: 403, message: 'access forbidden'
+ it_behaves_like 'returning a token'
end
context 'with a user' do
- it 'returns a token' do
- expect(subject[:token]).not_to be_nil
- end
+ it_behaves_like 'returning a token'
end
end
end
diff --git a/spec/services/authorized_project_update/project_recalculate_per_user_service_spec.rb b/spec/services/authorized_project_update/project_recalculate_per_user_service_spec.rb
new file mode 100644
index 00000000000..62862d0e558
--- /dev/null
+++ b/spec/services/authorized_project_update/project_recalculate_per_user_service_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuthorizedProjectUpdate::ProjectRecalculatePerUserService, '#execute' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:another_user) { create(:user) }
+
+ subject(:execute) { described_class.new(project, user).execute }
+
+ it 'returns success' do
+ expect(execute.success?).to eq(true)
+ end
+
+ context 'when there are no changes to be made' do
+ it 'does not change authorizations' do
+ expect { execute }.not_to(change { ProjectAuthorization.count })
+ end
+ end
+
+ context 'when there are changes to be made' do
+ context 'when addition is required' do
+ before do
+ project.add_developer(user)
+ project.add_developer(another_user)
+ project.project_authorizations.where(user: [user, another_user]).delete_all
+ end
+
+ it 'adds a new authorization record for the specific user' do
+ expect { execute }.to(
+ change { project.project_authorizations.where(user: user).count }
+ .from(0).to(1)
+ )
+ end
+
+ it 'does not add a new authorization record for the other user' do
+ expect { execute }.not_to(
+ change { project.project_authorizations.where(user: another_user).count }
+ )
+ end
+
+ it 'adds a new authorization record with the correct access level for the specific user' do
+ execute
+
+ project_authorization = project.project_authorizations.where(
+ user: user,
+ access_level: Gitlab::Access::DEVELOPER
+ )
+
+ expect(project_authorization).to exist
+ end
+ end
+
+ context 'when removal is required' do
+ before do
+ create(:project_authorization, user: user, project: project)
+ create(:project_authorization, user: another_user, project: project)
+ end
+
+ it 'removes the authorization record for the specific user' do
+ expect { execute }.to(
+ change { project.project_authorizations.where(user: user).count }
+ .from(1).to(0)
+ )
+ end
+
+ it 'does not remove the authorization record for the other user' do
+ expect { execute }.not_to(
+ change { project.project_authorizations.where(user: another_user).count }
+ )
+ end
+ end
+
+ context 'when an update in access level is required' do
+ before do
+ project.add_developer(user)
+ project.add_developer(another_user)
+ project.project_authorizations.where(user: [user, another_user]).delete_all
+ create(:project_authorization, project: project, user: user, access_level: Gitlab::Access::GUEST)
+ create(:project_authorization, project: project, user: another_user, access_level: Gitlab::Access::GUEST)
+ end
+
+ it 'updates the authorization of the specific user to the correct access level' do
+ expect { execute }.to(
+ change { project.project_authorizations.find_by(user: user).access_level }
+ .from(Gitlab::Access::GUEST).to(Gitlab::Access::DEVELOPER)
+ )
+ end
+
+ it 'does not update the authorization of the other user to the correct access level' do
+ expect { execute }.not_to(
+ change { project.project_authorizations.find_by(user: another_user).access_level }
+ .from(Gitlab::Access::GUEST)
+ )
+ end
+ end
+ end
+end
diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb
index 2fd544ab949..bbdc178b234 100644
--- a/spec/services/boards/issues/list_service_spec.rb
+++ b/spec/services/boards/issues/list_service_spec.rb
@@ -55,6 +55,15 @@ RSpec.describe Boards::Issues::ListService do
it_behaves_like 'issues list service'
end
+
+ context 'when filtering by type' do
+ it 'only returns the specified type' do
+ issue = create(:labeled_issue, project: project, milestone: m1, labels: [development, p1], issue_type: 'incident')
+ params = { board_id: board.id, id: list1.id, issue_types: 'incident' }
+
+ expect(described_class.new(parent, user, params).execute).to eq [issue]
+ end
+ end
end
# rubocop: disable RSpec/MultipleMemoizedHelpers
diff --git a/spec/services/bulk_create_integration_service_spec.rb b/spec/services/bulk_create_integration_service_spec.rb
index 4b0029e27cb..517222c0e69 100644
--- a/spec/services/bulk_create_integration_service_spec.rb
+++ b/spec/services/bulk_create_integration_service_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe BulkCreateIntegrationService do
described_class.new(integration, batch, association).execute
expect(created_integration.attributes.except(*excluded_attributes))
- .to eq(integration.attributes.except(*excluded_attributes))
+ .to eq(integration.reload.attributes.except(*excluded_attributes))
end
context 'integration with data fields' do
@@ -96,18 +96,4 @@ RSpec.describe BulkCreateIntegrationService do
it_behaves_like 'updates inherit_from_id'
end
end
-
- context 'passing a template integration' do
- let(:integration) { template_integration }
-
- context 'with a project association' do
- let!(:project) { create(:project) }
- let(:created_integration) { project.jira_integration }
- let(:batch) { Project.where(id: project.id) }
- let(:association) { 'project' }
- let(:inherit_from_id) { integration.id }
-
- it_behaves_like 'creates integration from batch ids'
- end
- end
end
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
index b6b7d1936a2..c10a9b75648 100644
--- a/spec/services/bulk_update_integration_service_spec.rb
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -51,11 +51,11 @@ RSpec.describe BulkUpdateIntegrationService do
context 'with inherited integration' do
it 'updates the integration', :aggregate_failures do
- described_class.new(subgroup_integration, batch).execute
+ described_class.new(subgroup_integration.reload, batch).execute
expect(integration.reload.inherit_from_id).to eq(group_integration.id)
expect(integration.reload.attributes.except(*excluded_attributes))
- .to eq(subgroup_integration.attributes.except(*excluded_attributes))
+ .to eq(subgroup_integration.reload.attributes.except(*excluded_attributes))
expect(excluded_integration.reload.inherit_from_id).not_to eq(group_integration.id)
expect(excluded_integration.reload.attributes.except(*excluded_attributes))
@@ -76,4 +76,16 @@ RSpec.describe BulkUpdateIntegrationService do
end
end
end
+
+ it 'works with batch as an ActiveRecord::Relation' do
+ expect do
+ described_class.new(group_integration, Integration.where(id: integration.id)).execute
+ end.to change { integration.reload.url }.to(group_integration.url)
+ end
+
+ it 'works with batch as an array of ActiveRecord objects' do
+ expect do
+ described_class.new(group_integration, [integration]).execute
+ end.to change { integration.reload.url }.to(group_integration.url)
+ end
end
diff --git a/spec/services/ci/after_requeue_job_service_spec.rb b/spec/services/ci/after_requeue_job_service_spec.rb
index f8c49060ce0..df5ddcafb37 100644
--- a/spec/services/ci/after_requeue_job_service_spec.rb
+++ b/spec/services/ci/after_requeue_job_service_spec.rb
@@ -8,37 +8,41 @@ RSpec.describe Ci::AfterRequeueJobService do
let(:pipeline) { create(:ci_pipeline, project: project) }
+ let!(:build) { create(:ci_build, pipeline: pipeline, stage_idx: 0, name: 'build') }
let!(:test1) { create(:ci_build, :success, pipeline: pipeline, stage_idx: 1) }
let!(:test2) { create(:ci_build, :skipped, pipeline: pipeline, stage_idx: 1) }
- let!(:build) { create(:ci_build, pipeline: pipeline, stage_idx: 0, name: 'build') }
+ let!(:test3) { create(:ci_build, :skipped, :dependent, pipeline: pipeline, stage_idx: 1, needed: build) }
+ let!(:deploy) { create(:ci_build, :skipped, :dependent, pipeline: pipeline, stage_idx: 2, needed: test3) }
subject(:execute_service) { described_class.new(project, user).execute(build) }
it 'marks subsequent skipped jobs as processable' do
expect(test1.reload).to be_success
expect(test2.reload).to be_skipped
+ expect(test3.reload).to be_skipped
+ expect(deploy.reload).to be_skipped
execute_service
expect(test1.reload).to be_success
expect(test2.reload).to be_created
+ expect(test3.reload).to be_created
+ expect(deploy.reload).to be_created
end
context 'when there is a job need from the same stage' do
- let!(:test3) do
+ let!(:test4) do
create(:ci_build,
:skipped,
+ :dependent,
pipeline: pipeline,
stage_idx: 0,
- scheduling_type: :dag)
- end
-
- before do
- create(:ci_build_need, build: test3, name: 'build')
+ scheduling_type: :dag,
+ needed: build)
end
it 'marks subsequent skipped jobs as processable' do
- expect { execute_service }.to change { test3.reload.status }.from('skipped').to('created')
+ expect { execute_service }.to change { test4.reload.status }.from('skipped').to('created')
end
context 'with ci_same_stage_job_needs FF disabled' do
@@ -47,7 +51,7 @@ RSpec.describe Ci::AfterRequeueJobService do
end
it 'does nothing with the build' do
- expect { execute_service }.not_to change { test3.reload.status }
+ expect { execute_service }.not_to change { test4.reload.status }
end
end
end
diff --git a/spec/services/ci/append_build_trace_service_spec.rb b/spec/services/ci/append_build_trace_service_spec.rb
index b251f00158f..487dbacbe90 100644
--- a/spec/services/ci/append_build_trace_service_spec.rb
+++ b/spec/services/ci/append_build_trace_service_spec.rb
@@ -75,25 +75,5 @@ RSpec.describe Ci::AppendBuildTraceService do
expect(build.reload).to be_failed
expect(build.failure_reason).to eq 'trace_size_exceeded'
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(ci_jobs_trace_size_limit: false)
- end
-
- it 'appends trace chunks' do
- stream_size = 1.25.megabytes
- body_data = 'x' * stream_size
- content_range = "0-#{stream_size}"
-
- result = described_class
- .new(build, content_range: content_range)
- .execute(body_data)
-
- expect(result.status).to eq 202
- expect(result.stream_size).to eq stream_size
- expect(build.trace_chunks.count).to eq 10
- end
- end
end
end
diff --git a/spec/services/ci/build_cancel_service_spec.rb b/spec/services/ci/build_cancel_service_spec.rb
new file mode 100644
index 00000000000..fe036dc1368
--- /dev/null
+++ b/spec/services/ci/build_cancel_service_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::BuildCancelService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ describe '#execute' do
+ subject(:execute) { described_class.new(build, user).execute }
+
+ context 'when user is authorized to cancel the build' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'when build is cancelable' do
+ let!(:build) { create(:ci_build, :cancelable, pipeline: pipeline) }
+
+ it 'transits build to canceled', :aggregate_failures do
+ response = execute
+
+ expect(response).to be_success
+ expect(response.payload.reload).to be_canceled
+ end
+ end
+
+ context 'when build is not cancelable' do
+ let!(:build) { create(:ci_build, :canceled, pipeline: pipeline) }
+
+ it 'responds with unprocessable entity', :aggregate_failures do
+ response = execute
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:unprocessable_entity)
+ end
+ end
+ end
+
+ context 'when user is not authorized to cancel the build' do
+ let!(:build) { create(:ci_build, :cancelable, pipeline: pipeline) }
+
+ it 'responds with forbidden', :aggregate_failures do
+ response = execute
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:forbidden)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/build_unschedule_service_spec.rb b/spec/services/ci/build_unschedule_service_spec.rb
new file mode 100644
index 00000000000..d784d9a2754
--- /dev/null
+++ b/spec/services/ci/build_unschedule_service_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::BuildUnscheduleService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ describe '#execute' do
+ subject(:execute) { described_class.new(build, user).execute }
+
+ context 'when user is authorized to unschedule the build' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'when build is scheduled' do
+ let!(:build) { create(:ci_build, :scheduled, pipeline: pipeline) }
+
+ it 'transits build to manual' do
+ response = execute
+
+ expect(response).to be_success
+ expect(response.payload.reload).to be_manual
+ end
+ end
+
+ context 'when build is not scheduled' do
+ let!(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it 'responds with unprocessable entity', :aggregate_failures do
+ response = execute
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:unprocessable_entity)
+ end
+ end
+ end
+
+ context 'when user is not authorized to unschedule the build' do
+ let!(:build) { create(:ci_build, :scheduled, pipeline: pipeline) }
+
+ it 'responds with forbidden', :aggregate_failures do
+ response = execute
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:forbidden)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index 18bd59a17f0..2237fd76d07 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -624,6 +624,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
let(:primary_pipeline) do
Ci::CreatePipelineService.new(upstream_project, upstream_project.owner, { ref: 'master' })
.execute(:push, save_on_errors: false)
+ .payload
end
let(:bridge) { primary_pipeline.processables.find_by(name: 'bridge-job') }
diff --git a/spec/services/ci/create_pipeline_service/cache_spec.rb b/spec/services/ci/create_pipeline_service/cache_spec.rb
index f9767a794db..f5f162e4578 100644
--- a/spec/services/ci/create_pipeline_service/cache_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cache_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
- let(:pipeline) { service.execute(source) }
+ let(:pipeline) { service.execute(source).payload }
let(:job) { pipeline.builds.find_by(name: 'job') }
before do
diff --git a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
index a42770aae20..c69c91593ae 100644
--- a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
+++ b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
- let(:pipeline) { service.execute(source) }
+ let(:pipeline) { service.execute(source).payload }
before do
stub_ci_pipeline_yaml_file(config)
diff --git a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
index b3b8e34dd8e..e1d60ed57ef 100644
--- a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
let_it_be(:group) { create(:group, name: 'my-organization') }
let(:upstream_project) { create(:project, :repository, name: 'upstream', group: group) }
- let(:downstram_project) { create(:project, :repository, name: 'downstream', group: group) }
+ let(:downstream_project) { create(:project, :repository, name: 'downstream', group: group) }
let(:user) { create(:user) }
let(:service) do
@@ -15,9 +15,9 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
before do
upstream_project.add_developer(user)
- downstram_project.add_developer(user)
+ downstream_project.add_developer(user)
create_gitlab_ci_yml(upstream_project, upstream_config)
- create_gitlab_ci_yml(downstram_project, downstream_config)
+ create_gitlab_ci_yml(downstream_project, downstream_config)
end
context 'with resource group', :aggregate_failures do
@@ -79,7 +79,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
end
def create_pipeline!
- service.execute(:push)
+ service.execute(:push).payload
end
def create_gitlab_ci_yml(project, content)
diff --git a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
index 42c3f52541b..f150a4f8b51 100644
--- a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:upstream_pipeline) { create(:ci_pipeline, project: project) }
let(:bridge) { create(:ci_bridge, pipeline: upstream_pipeline) }
- subject { service.execute(:push, bridge: bridge) }
+ subject { service.execute(:push, bridge: bridge).payload }
context 'custom config content' do
let(:bridge) do
diff --git a/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb b/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb
index 5dceb9f57f0..026111d59f1 100644
--- a/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb
+++ b/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
- let(:pipeline) { service.execute(source) }
+ let(:pipeline) { service.execute(source).payload }
before do
stub_ci_pipeline_yaml_file(config)
diff --git a/spec/services/ci/create_pipeline_service/dry_run_spec.rb b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
index 01df7772eef..ae43c63b516 100644
--- a/spec/services/ci/create_pipeline_service/dry_run_spec.rb
+++ b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:service) { described_class.new(project, user, { ref: ref }) }
- subject { service.execute(:push, dry_run: true) }
+ subject { service.execute(:push, dry_run: true).payload }
before do
stub_ci_pipeline_yaml_file(config)
diff --git a/spec/services/ci/create_pipeline_service/environment_spec.rb b/spec/services/ci/create_pipeline_service/environment_spec.rb
index e77591298ad..43b5220334c 100644
--- a/spec/services/ci/create_pipeline_service/environment_spec.rb
+++ b/spec/services/ci/create_pipeline_service/environment_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Ci::CreatePipelineService do
end
describe '#execute' do
- subject { service.execute(:push) }
+ subject { service.execute(:push).payload }
context 'with deployment tier' do
before do
diff --git a/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb b/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
index df881c1ac8f..9add096d782 100644
--- a/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
+++ b/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Ci::CreatePipelineService do
let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project, user, ref: 'master') }
- let(:pipeline) { service.execute(:push) }
+ let(:pipeline) { service.execute(:push).payload }
let(:job) { pipeline.builds.find_by(name: 'job') }
before do
diff --git a/spec/services/ci/create_pipeline_service/include_spec.rb b/spec/services/ci/create_pipeline_service/include_spec.rb
new file mode 100644
index 00000000000..46271ee36c0
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/include_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService do
+ context 'include:' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
+
+ let(:ref) { 'refs/heads/master' }
+ let(:source) { :push }
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:pipeline) { service.execute(source).payload }
+
+ let(:file_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' }
+
+ before do
+ allow(project.repository)
+ .to receive(:blob_data_at).with(project.commit.id, '.gitlab-ci.yml')
+ .and_return(config)
+
+ allow(project.repository)
+ .to receive(:blob_data_at).with(project.commit.id, file_location)
+ .and_return(File.read(Rails.root.join(file_location)))
+ end
+
+ context 'with a local file' do
+ let(:config) do
+ <<~EOY
+ include: #{file_location}
+ job:
+ script: exit 0
+ EOY
+ end
+
+ it 'includes the job in the file' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
+ end
+ end
+
+ context 'with a local file with rules' do
+ let(:config) do
+ <<~EOY
+ include:
+ - local: #{file_location}
+ rules:
+ - if: $CI_PROJECT_ID == "#{project_id}"
+ job:
+ script: exit 0
+ EOY
+ end
+
+ context 'when the rules matches' do
+ let(:project_id) { project.id }
+
+ it 'includes the job in the file' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
+ end
+
+ context 'when the FF ci_include_rules is disabled' do
+ before do
+ stub_feature_flags(ci_include_rules: false)
+ end
+
+ it 'includes the job in the file' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
+ end
+ end
+ end
+
+ context 'when the rules does not match' do
+ let(:project_id) { non_existing_record_id }
+
+ it 'does not include the job in the file' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.processables.pluck(:name)).to contain_exactly('job')
+ end
+
+ context 'when the FF ci_include_rules is disabled' do
+ before do
+ stub_feature_flags(ci_include_rules: false)
+ end
+
+ it 'includes the job in the file' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/merge_requests_spec.rb b/spec/services/ci/create_pipeline_service/merge_requests_spec.rb
index e5347faed6a..a1f85faa69f 100644
--- a/spec/services/ci/create_pipeline_service/merge_requests_spec.rb
+++ b/spec/services/ci/create_pipeline_service/merge_requests_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/feature' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
- let(:pipeline) { service.execute(source) }
+ let(:pipeline) { service.execute(source).payload }
before do
stub_ci_pipeline_yaml_file <<-EOS
diff --git a/spec/services/ci/create_pipeline_service/needs_spec.rb b/spec/services/ci/create_pipeline_service/needs_spec.rb
index d096db10d0b..9070d86f7f6 100644
--- a/spec/services/ci/create_pipeline_service/needs_spec.rb
+++ b/spec/services/ci/create_pipeline_service/needs_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
- let(:pipeline) { service.execute(source) }
+ let(:pipeline) { service.execute(source).payload }
before do
stub_ci_pipeline_yaml_file(config)
diff --git a/spec/services/ci/create_pipeline_service/parallel_spec.rb b/spec/services/ci/create_pipeline_service/parallel_spec.rb
index 5e34a67d376..6b455bf4874 100644
--- a/spec/services/ci/create_pipeline_service/parallel_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parallel_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Ci::CreatePipelineService do
let_it_be(:user) { project.owner }
let(:service) { described_class.new(project, user, { ref: 'master' }) }
- let(:pipeline) { service.execute(:push) }
+ let(:pipeline) { service.execute(:push).payload }
before do
stub_ci_pipeline_yaml_file(config)
diff --git a/spec/services/ci/create_pipeline_service/parameter_content_spec.rb b/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
index 94500a550c6..761504ffb58 100644
--- a/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Ci::CreatePipelineService do
describe '#execute' do
context 'when source is a dangling build' do
- subject { service.execute(:ondemand_dast_scan, content: content) }
+ subject { service.execute(:ondemand_dast_scan, content: content).payload }
context 'parameter config content' do
it 'creates a pipeline' do
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index 7a6535ed3fa..6eb1315fff4 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -369,6 +369,6 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
end
def create_pipeline!
- service.execute(:push)
+ service.execute(:push).payload
end
end
diff --git a/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb b/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb
index c84d9a53973..5e34eeb99db 100644
--- a/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb
+++ b/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
- let(:pipeline) { service.execute(source) }
+ let(:pipeline) { service.execute(source).payload }
let(:config) do
<<~YAML
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index acdf38bbc13..d0915f099de 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
- let(:pipeline) { service.execute(source) }
+ let(:pipeline) { service.execute(source).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
context 'job:rules' do
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 56bfeda3bff..2fdb0ed3c0d 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -19,7 +19,6 @@ RSpec.describe Ci::CreatePipelineService do
def execute_service(
source: :push,
after: project.commit.id,
- message: 'Message',
ref: ref_name,
trigger_request: nil,
variables_attributes: nil,
@@ -32,7 +31,6 @@ RSpec.describe Ci::CreatePipelineService do
params = { ref: ref,
before: '00000000',
after: after,
- commits: [{ message: message }],
variables_attributes: variables_attributes,
push_options: push_options,
source_sha: source_sha,
@@ -49,12 +47,16 @@ RSpec.describe Ci::CreatePipelineService do
# rubocop:enable Metrics/ParameterLists
context 'valid params' do
- let(:pipeline) { execute_service }
+ let(:pipeline) { execute_service.payload }
let(:pipeline_on_previous_commit) do
execute_service(
after: previous_commit_sha_from_ref('master')
- )
+ ).payload
+ end
+
+ it 'responds with success' do
+ expect(execute_service).to be_success
end
it 'creates a pipeline' do
@@ -128,7 +130,7 @@ RSpec.describe Ci::CreatePipelineService do
merge_request_1
merge_request_2
- head_pipeline = execute_service(ref: 'feature', after: nil)
+ head_pipeline = execute_service(ref: 'feature', after: nil).payload
expect(merge_request_1.reload.head_pipeline).to eq(head_pipeline)
expect(merge_request_2.reload.head_pipeline).to eq(head_pipeline)
@@ -157,7 +159,7 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: "branch_1",
source_project: project)
- head_pipeline = execute_service
+ head_pipeline = execute_service.payload
expect(merge_request.reload.head_pipeline).not_to eq(head_pipeline)
end
@@ -178,7 +180,7 @@ RSpec.describe Ci::CreatePipelineService do
source_project: project,
target_project: target_project)
- head_pipeline = execute_service(ref: 'feature', after: nil)
+ head_pipeline = execute_service(ref: 'feature', after: nil).payload
expect(merge_request.reload.head_pipeline).to eq(head_pipeline)
end
@@ -209,7 +211,7 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'feature',
source_project: project)
- head_pipeline = execute_service
+ head_pipeline = execute_service.payload
expect(head_pipeline).to be_persisted
expect(head_pipeline.yaml_errors).to be_present
@@ -230,7 +232,7 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'feature',
source_project: project)
- head_pipeline = execute_service
+ head_pipeline = execute_service.payload
expect(head_pipeline).to be_skipped
expect(head_pipeline).to be_persisted
@@ -260,7 +262,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'cancels running outdated pipelines', :sidekiq_inline do
pipeline_on_previous_commit.reload.run
- head_pipeline = execute_service
+ head_pipeline = execute_service.payload
expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: head_pipeline.id)
end
@@ -276,7 +278,8 @@ RSpec.describe Ci::CreatePipelineService do
new_pipeline = execute_service(
ref: 'refs/heads/feature',
after: previous_commit_sha_from_ref('feature')
- )
+ ).payload
+
pipeline
expect(new_pipeline.reload).to have_attributes(status: 'created', auto_canceled_by_id: nil)
@@ -290,7 +293,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'is cancelable' do
- pipeline = execute_service
+ pipeline = execute_service.payload
expect(pipeline.builds.find_by(name: 'rspec').interruptible).to be_nil
end
@@ -303,7 +306,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'is cancelable' do
- pipeline = execute_service
+ pipeline = execute_service.payload
expect(pipeline.builds.find_by(name: 'rspec').interruptible).to be_truthy
end
@@ -316,7 +319,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'is not cancelable' do
- pipeline = execute_service
+ pipeline = execute_service.payload
expect(pipeline.builds.find_by(name: 'rspec').interruptible).to be_falsy
end
@@ -476,21 +479,25 @@ RSpec.describe Ci::CreatePipelineService do
context "skip tag if there is no build for it" do
it "creates commit if there is appropriate job" do
- expect(execute_service).to be_persisted
+ expect(execute_service.payload).to be_persisted
end
it "creates commit if there is no appropriate job but deploy job has right ref setting" do
config = YAML.dump({ deploy: { script: "ls", only: ["master"] } })
stub_ci_pipeline_yaml_file(config)
- expect(execute_service).to be_persisted
+ expect(execute_service.payload).to be_persisted
end
end
- it 'skips creating pipeline for refs without .gitlab-ci.yml' do
+ it 'skips creating pipeline for refs without .gitlab-ci.yml', :aggregate_failures do
stub_ci_pipeline_yaml_file(nil)
- expect(execute_service).not_to be_persisted
+ response = execute_service
+
+ expect(response).to be_error
+ expect(response.message).to eq('Missing CI config file')
+ expect(response.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
expect(Namespaces::OnboardingPipelineCreatedWorker).not_to receive(:perform_async)
end
@@ -499,7 +506,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'creates failed pipeline' do
stub_ci_pipeline_yaml_file(ci_yaml)
- pipeline = execute_service(message: message)
+ pipeline = execute_service.payload
expect(pipeline).to be_persisted
expect(pipeline.builds.any?).to be false
@@ -516,7 +523,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'pull it from the repository' do
- pipeline = execute_service
+ pipeline = execute_service.payload
expect(pipeline).to be_repository_source
expect(pipeline.builds.map(&:name)).to eq ['rspec']
end
@@ -530,7 +537,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'pull it from Auto-DevOps' do
- pipeline = execute_service
+ pipeline = execute_service.payload
expect(pipeline).to be_auto_devops_source
expect(pipeline.builds.map(&:name)).to match_array(%w[brakeman-sast build code_quality eslint-sast secret_detection semgrep-sast test])
end
@@ -541,11 +548,12 @@ RSpec.describe Ci::CreatePipelineService do
stub_ci_pipeline_yaml_file(nil)
end
- it 'attaches errors to the pipeline' do
- pipeline = execute_service
+ it 'responds with error message', :aggregate_failures do
+ response = execute_service
- expect(pipeline.errors.full_messages).to eq ['Missing CI config file']
- expect(pipeline).not_to be_persisted
+ expect(response).to be_error
+ expect(response.message).to eq('Missing CI config file')
+ expect(response.payload).not_to be_persisted
end
end
@@ -556,7 +564,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'saves error in pipeline' do
- pipeline = execute_service
+ pipeline = execute_service.payload
expect(pipeline.yaml_errors).to include('Undefined error')
end
@@ -648,7 +656,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'saves error in pipeline' do
- pipeline = execute_service
+ pipeline = execute_service.payload
expect(pipeline.yaml_errors).to include('Undefined error')
end
@@ -661,26 +669,18 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when commit contains a [ci skip] directive' do
- let(:message) { "some message[ci skip]" }
-
- ci_messages = [
- "some message[ci skip]",
- "some message[skip ci]",
- "some message[CI SKIP]",
- "some message[SKIP CI]",
- "some message[ci_skip]",
- "some message[skip_ci]",
- "some message[ci-skip]",
- "some message[skip-ci]"
- ]
+ shared_examples 'creating a pipeline' do
+ it 'does not skip pipeline creation' do
+ pipeline = execute_service.payload
- before do
- allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { message }
+ expect(pipeline).to be_persisted
+ expect(pipeline.builds.first.name).to eq("rspec")
+ end
end
- ci_messages.each do |ci_message|
- it "skips builds creation if the commit message is #{ci_message}" do
- pipeline = execute_service(message: ci_message)
+ shared_examples 'skipping a pipeline' do
+ it 'skips pipeline creation' do
+ pipeline = execute_service.payload
expect(pipeline).to be_persisted
expect(pipeline.builds.any?).to be false
@@ -688,14 +688,26 @@ RSpec.describe Ci::CreatePipelineService do
end
end
- shared_examples 'creating a pipeline' do
- it 'does not skip pipeline creation' do
- allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { commit_message }
+ before do
+ allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { commit_message }
+ end
+
+ skip_commit_messages = [
+ "some message[ci skip]",
+ "some message[skip ci]",
+ "some message[CI SKIP]",
+ "some message[SKIP CI]",
+ "some message[ci_skip]",
+ "some message[skip_ci]",
+ "some message[ci-skip]",
+ "some message[skip-ci]"
+ ]
- pipeline = execute_service(message: commit_message)
+ skip_commit_messages.each do |skip_commit_message|
+ context "when the commit message is #{skip_commit_message}" do
+ let(:commit_message) { skip_commit_message }
- expect(pipeline).to be_persisted
- expect(pipeline.builds.first.name).to eq("rspec")
+ it_behaves_like 'skipping a pipeline'
end
end
@@ -712,9 +724,14 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when there is [ci skip] tag in commit message and yaml is invalid' do
+ let(:commit_message) { 'some message [ci skip]' }
let(:ci_yaml) { 'invalid: file: fiile' }
- it_behaves_like 'a failed pipeline'
+ before do
+ stub_ci_pipeline_yaml_file(ci_yaml)
+ end
+
+ it_behaves_like 'skipping a pipeline'
end
end
@@ -724,7 +741,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates a pipline in the skipped state' do
- pipeline = execute_service(push_options: push_options)
+ pipeline = execute_service(push_options: push_options).payload
# TODO: DRY these up with "skips builds creation if the commit message"
expect(pipeline).to be_persisted
@@ -739,10 +756,12 @@ RSpec.describe Ci::CreatePipelineService do
stub_ci_pipeline_yaml_file(config)
end
- it 'does not create a new pipeline' do
+ it 'does not create a new pipeline', :aggregate_failures do
result = execute_service
- expect(result).not_to be_persisted
+ expect(result).to be_error
+ expect(result.message).to eq('No stages / jobs for this pipeline.')
+ expect(result.payload).not_to be_persisted
expect(Ci::Build.all).to be_empty
expect(Ci::Pipeline.count).to eq(0)
end
@@ -757,10 +776,11 @@ RSpec.describe Ci::CreatePipelineService do
.and_call_original
end
- it 'rewinds iid' do
+ it 'rewinds iid', :aggregate_failures do
result = execute_service
- expect(result).not_to be_persisted
+ expect(result).to be_error
+ expect(result.payload).not_to be_persisted
expect(internal_id.last_value).to eq(0)
end
end
@@ -773,7 +793,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'does not create a new pipeline', :sidekiq_inline do
- result = execute_service
+ result = execute_service.payload
expect(result).to be_persisted
expect(result.manual_actions).not_to be_empty
@@ -793,7 +813,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates the environment with tags' do
- result = execute_service
+ result = execute_service.payload
expect(result).to be_persisted
expect(Environment.find_by(name: "review/master")).to be_present
@@ -815,7 +835,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates the environment with auto stop in' do
- result = execute_service
+ result = execute_service.payload
expect(result).to be_persisted
expect(result.builds.first.options[:environment][:auto_stop_in]).to eq('1 day')
@@ -835,7 +855,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'skipps persisted variables in environment name' do
- result = execute_service
+ result = execute_service.payload
expect(result).to be_persisted
expect(Environment.find_by(name: "review/id1/id2")).to be_present
@@ -860,7 +880,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'stores the requested namespace' do
- result = execute_service
+ result = execute_service.payload
build = result.builds.first
expect(result).to be_persisted
@@ -876,7 +896,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'does not create an environment' do
expect do
- result = execute_service
+ result = execute_service.payload
expect(result).to be_persisted
end.not_to change { Environment.count }
@@ -896,7 +916,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates a pipeline with the environment' do
- result = execute_service
+ result = execute_service.payload
expect(result).to be_persisted
expect(Environment.find_by(name: 'production')).to be_present
@@ -906,7 +926,7 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when builds with auto-retries are configured' do
- let(:pipeline) { execute_service }
+ let(:pipeline) { execute_service.payload }
let(:rspec_job) { pipeline.builds.find_by(name: 'rspec') }
before do
@@ -946,7 +966,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:resource_group_key) { 'iOS' }
it 'persists the association correctly' do
- result = execute_service
+ result = execute_service.payload
deploy_job = result.builds.find_by_name!(:test)
resource_group = project.resource_groups.find_by_key!(resource_group_key)
@@ -962,7 +982,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:resource_group_key) { '$CI_COMMIT_REF_NAME-$CI_JOB_NAME' }
it 'interpolates the variables into the key correctly' do
- result = execute_service
+ result = execute_service.payload
expect(result).to be_persisted
expect(project.resource_groups.exists?(key: 'master-test')).to eq(true)
@@ -979,7 +999,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'correctly creates builds with custom timeout value configured' do
- pipeline = execute_service
+ pipeline = execute_service.payload
expect(pipeline).to be_persisted
expect(pipeline.builds.find_by(name: 'rspec').options[:job_timeout]).to eq 123
@@ -994,7 +1014,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'is valid config' do
- pipeline = execute_service
+ pipeline = execute_service.payload
build = pipeline.builds.first
expect(pipeline).to be_kind_of(Ci::Pipeline)
expect(pipeline).to be_valid
@@ -1059,14 +1079,14 @@ RSpec.describe Ci::CreatePipelineService do
project.add_developer(user)
end
- it 'does not create a pipeline' do
- expect(execute_service).not_to be_persisted
+ it 'does not create a pipeline', :aggregate_failures do
+ expect(execute_service.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
end
end
context 'when user is maintainer' do
- let(:pipeline) { execute_service }
+ let(:pipeline) { execute_service.payload }
before do
project.add_maintainer(user)
@@ -1083,9 +1103,11 @@ RSpec.describe Ci::CreatePipelineService do
let(:user) {}
let(:trigger_request) { create(:ci_trigger_request) }
- it 'does not create a pipeline' do
- expect(execute_service(trigger_request: trigger_request))
- .not_to be_persisted
+ it 'does not create a pipeline', :aggregate_failures do
+ response = execute_service(trigger_request: trigger_request)
+
+ expect(response).to be_error
+ expect(response.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
end
end
@@ -1099,9 +1121,11 @@ RSpec.describe Ci::CreatePipelineService do
project.add_developer(user)
end
- it 'does not create a pipeline' do
- expect(execute_service(trigger_request: trigger_request))
- .not_to be_persisted
+ it 'does not create a pipeline', :aggregate_failures do
+ response = execute_service(trigger_request: trigger_request)
+
+ expect(response).to be_error
+ expect(response.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
end
end
@@ -1116,7 +1140,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates a pipeline' do
- expect(execute_service(trigger_request: trigger_request))
+ expect(execute_service(trigger_request: trigger_request).payload)
.to be_persisted
expect(Ci::Pipeline.count).to eq(1)
end
@@ -1150,7 +1174,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates a tagged pipeline' do
- pipeline = execute_service(ref: 'v1.0.0')
+ pipeline = execute_service(ref: 'v1.0.0').payload
expect(pipeline.tag?).to be true
end
@@ -1161,7 +1185,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref_name) { 'refs/heads/nonexistant-branch' }
- let(:pipeline) { execute_service }
+ let(:pipeline) { execute_service.payload }
it 'does not create the pipeline' do
expect(pipeline).not_to be_created_successfully
@@ -1185,7 +1209,7 @@ RSpec.describe Ci::CreatePipelineService do
# v1.1.0 is on the test repo as branch and tag
let(:ref_name) { 'refs/heads/v1.1.0' }
- let(:pipeline) { execute_service }
+ let(:pipeline) { execute_service.payload }
it 'creates the pipeline for the branch' do
expect(pipeline).to be_created_successfully
@@ -1200,7 +1224,7 @@ RSpec.describe Ci::CreatePipelineService do
# v1.1.0 is on the test repo as branch and tag
let(:ref_name) { 'refs/tags/v1.1.0' }
- let(:pipeline) { execute_service }
+ let(:pipeline) { execute_service.payload }
it 'creates the pipeline for the tag' do
expect(pipeline).to be_created_successfully
@@ -1215,7 +1239,7 @@ RSpec.describe Ci::CreatePipelineService do
# v1.1.0 is on the test repo as branch and tag
let(:ref_name) { 'v1.1.0' }
- let(:pipeline) { execute_service }
+ let(:pipeline) { execute_service.payload }
it 'does not create the pipeline' do
expect(pipeline).not_to be_created_successfully
@@ -1229,16 +1253,16 @@ RSpec.describe Ci::CreatePipelineService do
{ key: 'second', secret_value: 'second_world' }]
end
- subject { execute_service(variables_attributes: variables_attributes) }
+ subject(:pipeline) { execute_service(variables_attributes: variables_attributes).payload }
it 'creates a pipeline with specified variables' do
- expect(subject.variables.map { |var| var.slice(:key, :secret_value) })
+ expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
.to eq variables_attributes.map(&:with_indifferent_access)
end
end
context 'when pipeline has a job with environment' do
- let(:pipeline) { execute_service }
+ let(:pipeline) { execute_service.payload }
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
@@ -1286,7 +1310,7 @@ RSpec.describe Ci::CreatePipelineService do
end
describe 'Pipeline for external pull requests' do
- let(:pipeline) do
+ let(:response) do
execute_service(source: source,
external_pull_request: pull_request,
ref: ref_name,
@@ -1294,6 +1318,8 @@ RSpec.describe Ci::CreatePipelineService do
target_sha: target_sha)
end
+ let(:pipeline) { response.payload }
+
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
@@ -1342,9 +1368,11 @@ RSpec.describe Ci::CreatePipelineService do
context 'when ref is tag' do
let(:ref_name) { 'refs/tags/v1.1.0' }
- it 'does not create an extrnal pull request pipeline' do
+ it 'does not create an extrnal pull request pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq('Tag is not included in the list and Failed to build the pipeline!')
expect(pipeline).not_to be_persisted
- expect(pipeline.errors[:tag]).to eq(["is not included in the list"])
+ expect(pipeline.errors[:tag]).to eq(['is not included in the list'])
end
end
@@ -1363,9 +1391,11 @@ RSpec.describe Ci::CreatePipelineService do
}
end
- it 'does not create a detached merge request pipeline' do
+ it 'does not create a detached merge request pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
- expect(pipeline.errors[:base]).to eq(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors[:base]).to eq(['No stages / jobs for this pipeline.'])
end
end
end
@@ -1373,7 +1403,9 @@ RSpec.describe Ci::CreatePipelineService do
context 'when external pull request is not specified' do
let(:pull_request) { nil }
- it 'does not create an external pull request pipeline' do
+ it 'does not create an external pull request pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq("External pull request can't be blank and Failed to build the pipeline!")
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:external_pull_request]).to eq(["can't be blank"])
end
@@ -1420,11 +1452,11 @@ RSpec.describe Ci::CreatePipelineService do
context 'when external pull request is not specified' do
let(:pull_request) { nil }
- it 'does not create an external pull request pipeline' do
+ it 'does not create an external pull request pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq("External pull request can't be blank and Failed to build the pipeline!")
expect(pipeline).not_to be_persisted
-
- expect(pipeline.errors[:base])
- .to eq(['Failed to build the pipeline!'])
+ expect(pipeline.errors[:base]).to eq(['Failed to build the pipeline!'])
end
end
end
@@ -1432,7 +1464,7 @@ RSpec.describe Ci::CreatePipelineService do
end
describe 'Pipelines for merge requests' do
- let(:pipeline) do
+ let(:response) do
execute_service(source: source,
merge_request: merge_request,
ref: ref_name,
@@ -1440,6 +1472,8 @@ RSpec.describe Ci::CreatePipelineService do
target_sha: target_sha)
end
+ let(:pipeline) { response.payload }
+
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
@@ -1522,9 +1556,11 @@ RSpec.describe Ci::CreatePipelineService do
context 'when ref is tag' do
let(:ref_name) { 'refs/tags/v1.1.0' }
- it 'does not create a merge request pipeline' do
+ it 'does not create a merge request pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq('Tag is not included in the list and Failed to build the pipeline!')
expect(pipeline).not_to be_persisted
- expect(pipeline.errors[:tag]).to eq(["is not included in the list"])
+ expect(pipeline.errors[:tag]).to eq(['is not included in the list'])
end
end
@@ -1564,9 +1600,10 @@ RSpec.describe Ci::CreatePipelineService do
}
end
- it 'does not create a detached merge request pipeline' do
+ it 'does not create a detached merge request pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
- expect(pipeline.errors[:base]).to eq(["No stages / jobs for this pipeline."])
end
end
end
@@ -1599,11 +1636,10 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'master')
end
- it 'does not create a detached merge request pipeline' do
+ it 'does not create a detached merge request pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
-
- expect(pipeline.errors[:base])
- .to eq(['No stages / jobs for this pipeline.'])
end
end
end
@@ -1628,11 +1664,10 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'master')
end
- it 'does not create a detached merge request pipeline' do
+ it 'does not create a detached merge request pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
-
- expect(pipeline.errors[:base])
- .to eq(['No stages / jobs for this pipeline.'])
end
end
end
@@ -1659,11 +1694,10 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'master')
end
- it 'does not create a detached merge request pipeline' do
+ it 'does not create a detached merge request pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
-
- expect(pipeline.errors[:base])
- .to eq(['No stages / jobs for this pipeline.'])
end
end
end
@@ -1688,11 +1722,10 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'master')
end
- it 'does not create a detached merge request pipeline' do
+ it 'does not create a detached merge request pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
-
- expect(pipeline.errors[:base])
- .to eq(['No stages / jobs for this pipeline.'])
end
end
end
@@ -1733,7 +1766,8 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when needs is used' do
- let(:pipeline) { execute_service }
+ let(:response) { execute_service }
+ let(:pipeline) { response.payload }
let(:config) do
{
@@ -1779,7 +1813,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref_name) { 'refs/heads/feature' }
shared_examples 'has errors' do
- it 'contains the expected errors' do
+ it 'contains the expected errors', :aggregate_failures do
expect(pipeline.builds).to be_empty
error_message = "'test_a' job needs 'build_a' job, but 'build_a' is not in any previous stage"
@@ -1790,9 +1824,12 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when save_on_errors is enabled' do
- let(:pipeline) { execute_service(save_on_errors: true) }
+ let(:response) { execute_service(save_on_errors: true) }
+ let(:pipeline) { response.payload }
- it 'does create a pipeline as test_a depends on build_a' do
+ it 'does create a pipeline as test_a depends on build_a', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq("'test_a' job needs 'build_a' job, but 'build_a' is not in any previous stage")
expect(pipeline).to be_persisted
end
@@ -1800,9 +1837,11 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when save_on_errors is disabled' do
- let(:pipeline) { execute_service(save_on_errors: false) }
+ let(:response) { execute_service(save_on_errors: false) }
+ let(:pipeline) { response.payload }
- it 'does not create a pipeline as test_a depends on build_a' do
+ it 'does not create a pipeline as test_a depends on build_a', :aggregate_failures do
+ expect(response).to be_error
expect(pipeline).not_to be_persisted
end
@@ -1822,7 +1861,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'when rules are used' do
let(:ref_name) { 'refs/heads/master' }
- let(:pipeline) { execute_service }
+ let(:response) { execute_service }
+ let(:pipeline) { response.payload }
let(:build_names) { pipeline.builds.pluck(:name) }
let(:regular_job) { find_job('regular-job') }
let(:rules_job) { find_job('rules-job') }
@@ -2379,8 +2419,9 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when inside freeze period' do
- it 'does not create the pipeline' do
+ it 'does not create the pipeline', :aggregate_failures do
Timecop.freeze(2020, 4, 10, 23, 1) do
+ expect(response).to be_error
expect(pipeline).not_to be_persisted
end
end
@@ -2410,7 +2451,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'with no matches' do
let(:ref_name) { 'refs/heads/feature' }
- it 'does not create a pipeline' do
+ it 'does not create a pipeline', :aggregate_failures do
+ expect(response).to be_error
expect(pipeline).not_to be_persisted
end
end
@@ -2418,7 +2460,7 @@ RSpec.describe Ci::CreatePipelineService do
context 'with workflow rules with pipeline variables' do
let(:pipeline) do
- execute_service(variables_attributes: variables_attributes)
+ execute_service(variables_attributes: variables_attributes).payload
end
let(:config) do
@@ -2446,7 +2488,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'with no matches' do
let(:variables_attributes) { {} }
- it 'does not create a pipeline' do
+ it 'does not create a pipeline', :aggregate_failures do
+ expect(response).to be_error
expect(pipeline).not_to be_persisted
end
end
@@ -2456,7 +2499,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:pipeline) do
execute_service do |pipeline|
pipeline.variables.build(variables)
- end
+ end.payload
end
let(:config) do
@@ -2514,7 +2557,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'with no matches' do
let(:variables) { {} }
- it 'does not create a pipeline' do
+ it 'does not create a pipeline', :aggregate_failures do
+ expect(response).to be_error
expect(pipeline).not_to be_persisted
end
@@ -2542,7 +2586,8 @@ RSpec.describe Ci::CreatePipelineService do
EOY
end
- it 'does not create a pipeline' do
+ it 'does not create a pipeline', :aggregate_failures do
+ expect(response).to be_error
expect(pipeline).not_to be_persisted
end
end
diff --git a/spec/services/ci/daily_build_group_report_result_service_spec.rb b/spec/services/ci/daily_build_group_report_result_service_spec.rb
index e58a5de26a1..32651247adb 100644
--- a/spec/services/ci/daily_build_group_report_result_service_spec.rb
+++ b/spec/services/ci/daily_build_group_report_result_service_spec.rb
@@ -41,6 +41,17 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
expect(Ci::DailyBuildGroupReportResult.find_by(group_name: 'extra')).to be_nil
end
+ it 'creates a project_ci_feature_usage record for the pipeline project' do
+ described_class.new.execute(pipeline)
+
+ expect(Projects::CiFeatureUsage.count).to eq(1)
+ expect(Projects::CiFeatureUsage.first).to have_attributes(
+ project_id: pipeline.project.id,
+ feature: 'code_coverage',
+ default_branch: false
+ )
+ end
+
context 'when there are multiple builds with the same group name that report coverage' do
let!(:test_job_1) { create(:ci_build, pipeline: pipeline, name: 'test 1/2', coverage: 70) }
let!(:test_job_2) { create(:ci_build, pipeline: pipeline, name: 'test 2/2', coverage: 80) }
@@ -99,6 +110,16 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
data: { 'coverage' => new_karma_job.coverage }
)
end
+
+ it 'does not create a new project_ci_feature_usage record for the pipeline project' do
+ expect { described_class.new.execute(pipeline) }.not_to change { Projects::CiFeatureUsage.count }
+
+ expect(Projects::CiFeatureUsage.first).to have_attributes(
+ project_id: pipeline.project.id,
+ feature: 'code_coverage',
+ default_branch: false
+ )
+ end
end
context 'when the ID of the pipeline is older than the last_pipeline_id' do
@@ -161,6 +182,8 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
it 'does nothing' do
expect { described_class.new.execute(new_pipeline) }.not_to raise_error
+ expect(Ci::DailyBuildGroupReportResult.count).to eq(0)
+ expect(Projects::CiFeatureUsage.count).to eq(0)
end
end
@@ -178,6 +201,17 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
expect(coverage.default_branch).to be_truthy
end
end
+
+ it 'creates a project_ci_feature_usage record for the pipeline project for default branch' do
+ described_class.new.execute(pipeline)
+
+ expect(Projects::CiFeatureUsage.count).to eq(1)
+ expect(Projects::CiFeatureUsage.first).to have_attributes(
+ project_id: pipeline.project.id,
+ feature: 'code_coverage',
+ default_branch: true
+ )
+ end
end
context 'when pipeline ref_path is not the project default branch' do
diff --git a/spec/services/ci/destroy_pipeline_service_spec.rb b/spec/services/ci/destroy_pipeline_service_spec.rb
index 588ff0b1762..6c1c02b2875 100644
--- a/spec/services/ci/destroy_pipeline_service_spec.rb
+++ b/spec/services/ci/destroy_pipeline_service_spec.rb
@@ -78,18 +78,6 @@ RSpec.describe ::Ci::DestroyPipelineService do
subject
end
-
- context 'when cancel_pipelines_prior_to_destroy is disabled' do
- before do
- stub_feature_flags(cancel_pipelines_prior_to_destroy: false)
- end
-
- it "doesn't cancel the pipeline" do
- expect(pipeline).not_to receive(:cancel_running)
-
- subject
- end
- end
end
end
diff --git a/spec/services/ci/drop_pipeline_service_spec.rb b/spec/services/ci/drop_pipeline_service_spec.rb
index 4adbb99b9e2..c6a118c6083 100644
--- a/spec/services/ci/drop_pipeline_service_spec.rb
+++ b/spec/services/ci/drop_pipeline_service_spec.rb
@@ -9,8 +9,10 @@ RSpec.describe Ci::DropPipelineService do
let!(:cancelable_pipeline) { create(:ci_pipeline, :running, user: user) }
let!(:running_build) { create(:ci_build, :running, pipeline: cancelable_pipeline) }
+ let!(:commit_status_running) { create(:commit_status, :running, pipeline: cancelable_pipeline) }
let!(:success_pipeline) { create(:ci_pipeline, :success, user: user) }
let!(:success_build) { create(:ci_build, :success, pipeline: success_pipeline) }
+ let!(:commit_status_success) { create(:commit_status, :success, pipeline: cancelable_pipeline) }
describe '#execute_async_for_all' do
subject { described_class.new.execute_async_for_all(user.pipelines, failure_reason, user) }
diff --git a/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb b/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
index e25dd351bb3..2b310443b37 100644
--- a/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
+++ b/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
@@ -6,14 +6,13 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
describe '#execute' do
let_it_be(:project) { create(:project, :auto_devops, :repository) }
let_it_be(:user) { create(:user) }
-
- let(:pull_request) { create(:external_pull_request, project: project) }
+ let_it_be_with_reload(:pull_request) { create(:external_pull_request, project: project) }
before do
project.add_maintainer(user)
end
- subject { described_class.new(project, user).execute(pull_request) }
+ subject(:response) { described_class.new(project, user).execute(pull_request) }
context 'when pull request is open' do
before do
@@ -28,17 +27,20 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
pull_request.update!(source_branch: source_branch.name, source_sha: source_branch.target)
end
- it 'creates a pipeline for external pull request' do
- expect(subject).to be_valid
- expect(subject).to be_persisted
- expect(subject).to be_external_pull_request_event
- expect(subject).to eq(project.ci_pipelines.last)
- expect(subject.external_pull_request).to eq(pull_request)
- expect(subject.user).to eq(user)
- expect(subject.status).to eq('created')
- expect(subject.ref).to eq(pull_request.source_branch)
- expect(subject.sha).to eq(pull_request.source_sha)
- expect(subject.source_sha).to eq(pull_request.source_sha)
+ it 'creates a pipeline for external pull request', :aggregate_failures do
+ pipeline = response.payload
+
+ expect(response).to be_success
+ expect(pipeline).to be_valid
+ expect(pipeline).to be_persisted
+ expect(pipeline).to be_external_pull_request_event
+ expect(pipeline).to eq(project.ci_pipelines.last)
+ expect(pipeline.external_pull_request).to eq(pull_request)
+ expect(pipeline.user).to eq(user)
+ expect(pipeline.status).to eq('created')
+ expect(pipeline.ref).to eq(pull_request.source_branch)
+ expect(pipeline.sha).to eq(pull_request.source_sha)
+ expect(pipeline.source_sha).to eq(pull_request.source_sha)
end
end
@@ -50,10 +52,12 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
pull_request.update!(source_branch: source_branch.name, source_sha: commit.sha)
end
- it 'does nothing' do
+ it 'does nothing', :aggregate_failures do
expect(Ci::CreatePipelineService).not_to receive(:new)
- expect(subject).to be_nil
+ expect(response).to be_error
+ expect(response.message).to eq('The source sha is not the head of the source branch')
+ expect(response.payload).to be_nil
end
end
end
@@ -63,10 +67,12 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
pull_request.update!(status: :closed)
end
- it 'does nothing' do
+ it 'does nothing', :aggregate_failures do
expect(Ci::CreatePipelineService).not_to receive(:new)
- expect(subject).to be_nil
+ expect(response).to be_error
+ expect(response.message).to eq('The pull request is not opened')
+ expect(response.payload).to be_nil
end
end
end
diff --git a/spec/services/ci/extract_sections_from_build_trace_service_spec.rb b/spec/services/ci/extract_sections_from_build_trace_service_spec.rb
deleted file mode 100644
index c6ffcdcc6a8..00000000000
--- a/spec/services/ci/extract_sections_from_build_trace_service_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::ExtractSectionsFromBuildTraceService, '#execute' do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:build) { create(:ci_build, project: project) }
-
- subject { described_class.new(project, user) }
-
- shared_examples 'build trace has sections markers' do
- before do
- build.trace.set(File.read(expand_fixture_path('trace/trace_with_sections')))
- end
-
- it 'saves the correct extracted sections' do
- expect(build.trace_sections).to be_empty
- expect(subject.execute(build)).to be(true)
- expect(build.trace_sections).not_to be_empty
- end
-
- it "fails if trace_sections isn't empty" do
- expect(subject.execute(build)).to be(true)
- expect(build.trace_sections).not_to be_empty
-
- expect(subject.execute(build)).to be(false)
- expect(build.trace_sections).not_to be_empty
- end
- end
-
- shared_examples 'build trace has no sections markers' do
- before do
- build.trace.set('no markerts')
- end
-
- it 'extracts no sections' do
- expect(build.trace_sections).to be_empty
- expect(subject.execute(build)).to be(true)
- expect(build.trace_sections).to be_empty
- end
- end
-
- context 'when the build has no user' do
- it_behaves_like 'build trace has sections markers'
- it_behaves_like 'build trace has no sections markers'
- end
-
- context 'when the build has a valid user' do
- before do
- build.user = user
- end
-
- it_behaves_like 'build trace has sections markers'
- it_behaves_like 'build trace has no sections markers'
- end
-end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
index 5089f8d5dba..a4bc8e68b2d 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -871,7 +871,7 @@ RSpec.shared_examples 'Pipeline Processing Service' do
end
let(:pipeline) do
- Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push)
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
end
before do
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
index 572808cd2db..b4ad2512593 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
@@ -10,7 +10,7 @@ RSpec.shared_context 'Pipeline Processing Service Tests With Yaml' do
with_them do
let(:test_file) { YAML.load_file(test_file_path) }
- let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline) }
+ let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline).payload }
before do
stub_ci_pipeline_yaml_file(YAML.dump(test_file['config']))
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_same_and_different_stage_needs.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_same_and_different_stage_needs.yml
new file mode 100644
index 00000000000..115258c656e
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_same_and_different_stage_needs.yml
@@ -0,0 +1,54 @@
+config:
+ stages: [first, second, third]
+
+ job_a:
+ when: manual
+ stage: first
+ script:
+ - echo
+
+ job_b:
+ when: manual
+ stage: second
+ script:
+ - echo
+
+ job_c:
+ needs: ["job_b"]
+ stage: third
+ script:
+ - echo
+
+ job_d:
+ needs: ["job_a"]
+ stage: third
+ script:
+ - echo
+
+init:
+ expect:
+ pipeline: skipped
+ stages:
+ first: skipped
+ second: skipped
+ third: skipped
+ jobs:
+ job_a: manual
+ job_b: manual
+ job_c: skipped
+ job_d: skipped
+
+transitions:
+ - event: play
+ jobs: [job_b]
+ expect:
+ pipeline: pending
+ stages:
+ first: skipped
+ second: pending
+ third: pending
+ jobs:
+ job_a: manual
+ job_b: pending
+ job_c: created
+ job_d: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_same_stage_needs.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_same_stage_needs.yml
new file mode 100644
index 00000000000..fd15f7d1b57
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_same_stage_needs.yml
@@ -0,0 +1,70 @@
+config:
+ stages: [first, second, third, fourth]
+
+ first_job:
+ stage: first
+ script:
+ - echo
+
+ second_job:
+ stage: second
+ script:
+ - echo
+ when: manual
+
+ third_job:
+ stage: third
+ needs: ["second_job"]
+ script:
+ - echo
+
+ fourth_job:
+ stage: fourth
+ needs: ["third_job"]
+ script:
+ - echo
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ first: pending
+ second: created
+ third: created
+ fourth: created
+ jobs:
+ first_job: pending
+ second_job: created
+ third_job: created
+ fourth_job: created
+
+transitions:
+ - event: success
+ jobs: [first_job]
+ expect:
+ pipeline: success
+ stages:
+ first: success
+ second: skipped
+ third: skipped
+ fourth: skipped
+ jobs:
+ first_job: success
+ second_job: manual
+ third_job: skipped
+ fourth_job: skipped
+
+ - event: play
+ jobs: [second_job]
+ expect:
+ pipeline: running
+ stages:
+ first: success
+ second: pending
+ third: skipped
+ fourth: skipped
+ jobs:
+ first_job: success
+ second_job: pending
+ third_job: created
+ fourth_job: created
diff --git a/spec/services/ci/pipeline_trigger_service_spec.rb b/spec/services/ci/pipeline_trigger_service_spec.rb
index 080ca1cf0cd..2f93b1ecd3c 100644
--- a/spec/services/ci/pipeline_trigger_service_spec.rb
+++ b/spec/services/ci/pipeline_trigger_service_spec.rb
@@ -24,9 +24,11 @@ RSpec.describe Ci::PipelineTriggerService do
context 'when the pipeline was not created successfully' do
let(:fail_pipeline) do
receive(:execute).and_wrap_original do |original, *args|
- pipeline = original.call(*args)
+ response = original.call(*args)
+ pipeline = response.payload
pipeline.update!(failure_reason: 'unknown_failure')
- pipeline
+
+ response
end
end
diff --git a/spec/services/ci/pipelines/add_job_service_spec.rb b/spec/services/ci/pipelines/add_job_service_spec.rb
index a72ffbfdc87..bdf7e577fa8 100644
--- a/spec/services/ci/pipelines/add_job_service_spec.rb
+++ b/spec/services/ci/pipelines/add_job_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Ci::Pipelines::AddJobService do
+ include ExclusiveLeaseHelpers
+
let_it_be(:pipeline) { create(:ci_pipeline) }
let(:job) { build(:ci_build) }
@@ -68,5 +70,38 @@ RSpec.describe Ci::Pipelines::AddJobService do
execute
end
end
+
+ context 'exclusive lock' do
+ let(:lock_uuid) { 'test' }
+ let(:lock_key) { "ci:pipelines:#{pipeline.id}:add-job" }
+ let(:lock_timeout) { 1.minute }
+
+ before do
+ # "Please stub a default value first if message might be received with other args as well."
+ allow(Gitlab::ExclusiveLease).to receive(:new).and_call_original
+ end
+
+ it 'uses exclusive lock' do
+ lease = stub_exclusive_lease(lock_key, lock_uuid, timeout: lock_timeout)
+ expect(lease).to receive(:try_obtain)
+ expect(lease).to receive(:cancel)
+
+ expect(execute).to be_success
+ expect(execute.payload[:job]).to eq(job)
+ end
+
+ context 'when the FF ci_pipeline_add_job_with_lock is disabled' do
+ before do
+ stub_feature_flags(ci_pipeline_add_job_with_lock: false)
+ end
+
+ it 'does not use exclusive lock' do
+ expect(Gitlab::ExclusiveLease).not_to receive(:new).with(lock_key, timeout: lock_timeout)
+
+ expect(execute).to be_success
+ expect(execute.payload[:job]).to eq(job)
+ end
+ end
+ end
end
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 6e5d7725a7a..2f37d0ea42d 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
module Ci
RSpec.describe RegisterJobService do
let_it_be(:group) { create(:group) }
- let_it_be(:project, reload: true) { create(:project, group: group, shared_runners_enabled: false, group_runners_enabled: false) }
- let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be_with_reload(:project) { create(:project, group: group, shared_runners_enabled: false, group_runners_enabled: false) }
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
let!(:shared_runner) { create(:ci_runner, :instance) }
let!(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
@@ -14,7 +14,7 @@ module Ci
let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
describe '#execute' do
- context 'checks database loadbalancing stickiness' do
+ context 'checks database loadbalancing stickiness', :db_load_balancing do
subject { described_class.new(shared_runner).execute }
before do
@@ -22,9 +22,6 @@ module Ci
end
it 'result is valid if replica did caught-up' do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?)
- .and_return(true)
-
expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?)
.with(:runner, shared_runner.id) { true }
@@ -32,9 +29,6 @@ module Ci
end
it 'result is invalid if replica did not caught-up' do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?)
- .and_return(true)
-
expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?)
.with(:runner, shared_runner.id) { false }
@@ -96,6 +90,9 @@ module Ci
context 'allow shared runners' do
before do
project.update!(shared_runners_enabled: true)
+ pipeline.reload
+ pending_job.reload
+ pending_job.create_queuing_entry!
end
context 'for multiple builds' do
@@ -470,13 +467,27 @@ module Ci
context 'when depended job has not been completed yet' do
let!(:pre_stage_job) { create(:ci_build, :pending, :queued, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
- it { expect(subject).to eq(pending_job) }
+ it { is_expected.to eq(pending_job) }
end
context 'when artifacts of depended job has been expired' do
let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
- it_behaves_like 'not pick'
+ context 'when the pipeline is locked' do
+ before do
+ pipeline.artifacts_locked!
+ end
+
+ it { is_expected.to eq(pending_job) }
+ end
+
+ context 'when the pipeline is unlocked' do
+ before do
+ pipeline.unlocked!
+ end
+
+ it_behaves_like 'not pick'
+ end
end
context 'when artifacts of depended job has been erased' do
@@ -493,8 +504,12 @@ module Ci
let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
before do
- allow_any_instance_of(Ci::Build).to receive(:drop!)
- .and_raise(ActiveRecord::StaleObjectError.new(pending_job, :drop!))
+ pipeline.unlocked!
+
+ allow_next_instance_of(Ci::Build) do |build|
+ expect(build).to receive(:drop!)
+ .and_raise(ActiveRecord::StaleObjectError.new(pending_job, :drop!))
+ end
end
it 'does not drop nor pick' do
@@ -709,7 +724,21 @@ module Ci
stub_feature_flags(ci_pending_builds_queue_source: true)
end
- include_examples 'handles runner assignment'
+ context 'with ci_queueing_denormalize_shared_runners_information enabled' do
+ before do
+ stub_feature_flags(ci_queueing_denormalize_shared_runners_information: true)
+ end
+
+ include_examples 'handles runner assignment'
+ end
+
+ context 'with ci_queueing_denormalize_shared_runners_information disabled' do
+ before do
+ stub_feature_flags(ci_queueing_denormalize_shared_runners_information: false)
+ end
+
+ include_examples 'handles runner assignment'
+ end
end
context 'when not using pending builds table' do
@@ -783,6 +812,11 @@ module Ci
end
context 'when shared runner is used' do
+ before do
+ pending_job.reload
+ pending_job.create_queuing_entry!
+ end
+
let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 tag2)) }
let(:expected_shared_runner) { true }
let(:expected_shard) { ::Gitlab::Ci::Queue::Metrics::DEFAULT_METRICS_SHARD }
diff --git a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
index a741e3b49e7..53aa842bc28 100644
--- a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
+++ b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
@@ -51,14 +51,32 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupService do
end
context 'when there are no available resources' do
+ let!(:other_build) { create(:ci_build) }
+
before do
- resource_group.assign_resource_to(create(:ci_build))
+ resource_group.assign_resource_to(other_build)
end
it 'does not request resource' do
expect_any_instance_of(Ci::Build).not_to receive(:enqueue_waiting_for_resource)
subject
+
+ expect(build.reload).to be_waiting_for_resource
+ end
+
+ context 'when there is a stale build assigned to a resource' do
+ before do
+ other_build.doom!
+ other_build.update_column(:updated_at, 10.minutes.ago)
+ end
+
+ it 'releases the resource from the stale build and assigns to the waiting build' do
+ subject
+
+ expect(build.reload).to be_pending
+ expect(build.resource).to be_present
+ end
end
end
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 42d6e66b38b..ce2e6ba5e15 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe Ci::RetryBuildService do
artifacts_file artifacts_metadata artifacts_size commands
resource resource_group_id processed security_scans author
pipeline_id report_results pending_state pages_deployments
- queuing_entry runtime_metadata].freeze
+ queuing_entry runtime_metadata trace_metadata].freeze
shared_examples 'build duplication' do
let_it_be(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
diff --git a/spec/services/dependency_proxy/auth_token_service_spec.rb b/spec/services/dependency_proxy/auth_token_service_spec.rb
index 4b96f9d75a9..6214d75dfa0 100644
--- a/spec/services/dependency_proxy/auth_token_service_spec.rb
+++ b/spec/services/dependency_proxy/auth_token_service_spec.rb
@@ -14,6 +14,19 @@ RSpec.describe DependencyProxy::AuthTokenService do
result = subject
expect(result['user_id']).to eq(user.id)
+ expect(result['deploy_token']).to be_nil
+ end
+
+ context 'with a deploy token' do
+ let_it_be(:deploy_token) { create(:deploy_token) }
+ let_it_be(:token) { build_jwt(deploy_token) }
+
+ it 'returns the deploy token' do
+ result = subject
+
+ expect(result['deploy_token']).to eq(deploy_token.token)
+ expect(result['user_id']).to be_nil
+ end
end
it 'raises an error if the token is expired' do
diff --git a/spec/services/dependency_proxy/download_blob_service_spec.rb b/spec/services/dependency_proxy/download_blob_service_spec.rb
index 4b5c6b5bd6a..2f293b8a46b 100644
--- a/spec/services/dependency_proxy/download_blob_service_spec.rb
+++ b/spec/services/dependency_proxy/download_blob_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe DependencyProxy::DownloadBlobService do
let(:token) { Digest::SHA256.hexdigest('123') }
let(:blob_sha) { Digest::SHA256.hexdigest('ruby:2.7.0') }
- subject { described_class.new(image, blob_sha, token).execute }
+ subject(:download_blob) { described_class.new(image, blob_sha, token).execute }
context 'remote request is successful' do
before do
@@ -18,6 +18,21 @@ RSpec.describe DependencyProxy::DownloadBlobService do
it { expect(subject[:status]).to eq(:success) }
it { expect(subject[:file]).to be_a(Tempfile) }
it { expect(subject[:file].size).to eq(6) }
+
+ it 'streams the download' do
+ expected_options = { headers: anything, stream_body: true }
+
+ expect(Gitlab::HTTP).to receive(:perform_request).with(Net::HTTP::Get, anything, expected_options)
+
+ download_blob
+ end
+
+ it 'skips read_total_timeout', :aggregate_failures do
+ stub_const('Gitlab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT', 0)
+
+ expect(Gitlab::Metrics::System).not_to receive(:monotonic_time)
+ expect(download_blob).to include(status: :success)
+ end
end
context 'remote request is not found' do
diff --git a/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb b/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
index 4ba53d49d38..3fac749be29 100644
--- a/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe DependencyProxy::FindOrCreateBlobService do
expect(subject[:status]).to eq(:success)
expect(subject[:blob]).to be_a(DependencyProxy::Blob)
expect(subject[:blob]).to be_persisted
+ expect(subject[:from_cache]).to eq false
end
end
@@ -36,6 +37,7 @@ RSpec.describe DependencyProxy::FindOrCreateBlobService do
expect(subject[:status]).to eq(:success)
expect(subject[:blob]).to be_a(DependencyProxy::Blob)
expect(subject[:blob]).to eq(blob)
+ expect(subject[:from_cache]).to eq true
end
end
diff --git a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
index 1c8ae860d10..5896aa255f0 100644
--- a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
expect(subject[:status]).to eq(:success)
expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
expect(subject[:manifest]).to be_persisted
+ expect(subject[:from_cache]).to eq false
end
end
@@ -62,6 +63,7 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
expect(subject[:status]).to eq(:success)
expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
expect(subject[:manifest]).to eq(dependency_proxy_manifest)
+ expect(subject[:from_cache]).to eq true
end
end
@@ -81,6 +83,7 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
expect(subject[:manifest]).to eq(dependency_proxy_manifest)
expect(subject[:manifest].content_type).to eq(content_type)
expect(subject[:manifest].digest).to eq(digest)
+ expect(subject[:from_cache]).to eq false
end
end
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index 2e1de367da3..4f761454516 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -66,8 +66,8 @@ RSpec.describe DraftNotes::PublishService do
let(:commit_id) { nil }
before do
- create(:draft_note, merge_request: merge_request, author: user, note: 'first note', commit_id: commit_id, position: position)
- create(:draft_note, merge_request: merge_request, author: user, note: 'second note', commit_id: commit_id, position: position)
+ create(:draft_note_on_text_diff, merge_request: merge_request, author: user, note: 'first note', commit_id: commit_id, position: position)
+ create(:draft_note_on_text_diff, merge_request: merge_request, author: user, note: 'second note', commit_id: commit_id, position: position)
end
context 'when review fails to create' do
@@ -127,6 +127,30 @@ RSpec.describe DraftNotes::PublishService do
publish
end
+ context 'capturing diff notes positions' do
+ before do
+ # Need to execute this to ensure that we'll be able to test creation of
+ # DiffNotePosition records as that only happens when the `MergeRequest#merge_ref_head`
+ # is present. This service creates that for the specified merge request.
+ MergeRequests::MergeToRefService.new(project: project, current_user: user).execute(merge_request)
+ end
+
+ it 'creates diff_note_positions for diff notes' do
+ publish
+
+ notes = merge_request.notes.order(id: :asc)
+ expect(notes.first.diff_note_positions).to be_any
+ expect(notes.last.diff_note_positions).to be_any
+ end
+
+ it 'does not request a lot from Gitaly', :request_store do
+ # NOTE: This should be reduced as we work on reducing Gitaly calls.
+ # Gitaly requests shouldn't go above this threshold as much as possible
+ # as it may add more to the Gitaly N+1 issue we are experiencing.
+ expect { publish }.to change { Gitlab::GitalyClient.get_request_count }.by(11)
+ end
+ end
+
context 'commit_id is set' do
let(:commit_id) { commit.id }
diff --git a/spec/services/ci/stop_environments_service_spec.rb b/spec/services/environments/stop_service_spec.rb
index d5ef67c871c..52be512612d 100644
--- a/spec/services/ci/stop_environments_service_spec.rb
+++ b/spec/services/environments/stop_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::StopEnvironmentsService do
+RSpec.describe Environments::StopService do
include CreateEnvironmentsHelpers
let(:project) { create(:project, :private, :repository) }
@@ -11,6 +11,59 @@ RSpec.describe Ci::StopEnvironmentsService do
let(:service) { described_class.new(project, user) }
describe '#execute' do
+ subject { service.execute(environment) }
+
+ let_it_be(:project) { create(:project, :private, :repository) }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+
+ let(:user) { developer }
+
+ context 'with a deployment' do
+ let!(:environment) { review_job.persisted_environment }
+ let!(:pipeline) { create(:ci_pipeline, project: project) }
+ let!(:review_job) { create(:ci_build, :with_deployment, :start_review_app, pipeline: pipeline, project: project) }
+ let!(:stop_review_job) { create(:ci_build, :with_deployment, :stop_review_app, :manual, pipeline: pipeline, project: project) }
+
+ before do
+ review_job.success!
+ end
+
+ it 'stops the environment' do
+ expect { subject }.to change { environment.reload.state }.from('available').to('stopped')
+ end
+
+ it 'plays the stop action' do
+ expect { subject }.to change { stop_review_job.reload.status }.from('manual').to('pending')
+ end
+
+ context 'when an environment has already been stopped' do
+ let!(:environment) { create(:environment, :stopped, project: project) }
+
+ it 'does not play the stop action' do
+ expect { subject }.not_to change { stop_review_job.reload.status }
+ end
+ end
+ end
+
+ context 'without a deployment' do
+ let!(:environment) { create(:environment, project: project) }
+
+ it 'stops the environment' do
+ expect { subject }.to change { environment.reload.state }.from('available').to('stopped')
+ end
+
+ context 'when the actor is a reporter' do
+ let(:user) { reporter }
+
+ it 'does not stop the environment' do
+ expect { subject }.not_to change { environment.reload.state }
+ end
+ end
+ end
+ end
+
+ describe '#execute_for_branch' do
context 'when environment with review app exists' do
before do
create(:environment, :with_review_app, project: project,
@@ -48,8 +101,9 @@ RSpec.describe Ci::StopEnvironmentsService do
context 'when environment is not stopped' do
before do
- allow_any_instance_of(Environment)
- .to receive(:state).and_return(:stopped)
+ allow_next_found_instance_of(Environment) do |environment|
+ allow(environment).to receive(:state).and_return(:stopped)
+ end
end
it 'does not stop environment' do
@@ -101,7 +155,7 @@ RSpec.describe Ci::StopEnvironmentsService do
context 'when environment does not exist' do
it 'does not raise error' do
- expect { service.execute('master') }
+ expect { service.execute_for_branch('master') }
.not_to raise_error
end
end
@@ -238,16 +292,12 @@ RSpec.describe Ci::StopEnvironmentsService do
end
def expect_environment_stopped_on(branch)
- expect_any_instance_of(Environment)
- .to receive(:stop!)
-
- service.execute(branch)
+ expect { service.execute_for_branch(branch) }
+ .to change { Environment.last.state }.from('available').to('stopped')
end
def expect_environment_not_stopped_on(branch)
- expect_any_instance_of(Environment)
- .not_to receive(:stop!)
-
- service.execute(branch)
+ expect { service.execute_for_branch(branch) }
+ .not_to change { Environment.last.state }
end
end
diff --git a/spec/services/error_tracking/issue_details_service_spec.rb b/spec/services/error_tracking/issue_details_service_spec.rb
index 1954640a512..8cc2688d198 100644
--- a/spec/services/error_tracking/issue_details_service_spec.rb
+++ b/spec/services/error_tracking/issue_details_service_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe ErrorTracking::IssueDetailsService do
describe '#execute' do
context 'with authorized user' do
context 'when issue_details returns a detailed error' do
- let(:detailed_error) { build(:detailed_error_tracking_error) }
+ let(:detailed_error) { build(:error_tracking_sentry_detailed_error) }
let(:params) { { issue_id: detailed_error.id } }
before do
@@ -39,6 +39,21 @@ RSpec.describe ErrorTracking::IssueDetailsService do
include_examples 'error tracking service data not ready', :issue_details
include_examples 'error tracking service sentry error handling', :issue_details
include_examples 'error tracking service http status handling', :issue_details
+
+ context 'integrated error tracking' do
+ let_it_be(:error) { create(:error_tracking_error, project: project) }
+
+ let(:params) { { issue_id: error.id } }
+
+ before do
+ error_tracking_setting.update!(integrated: true)
+ end
+
+ it 'returns the error in detailed format' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:issue].to_json).to eq(error.to_sentry_detailed_error.to_json)
+ end
+ end
end
include_examples 'error tracking service unauthorized user'
diff --git a/spec/services/error_tracking/issue_latest_event_service_spec.rb b/spec/services/error_tracking/issue_latest_event_service_spec.rb
index b7560762ae4..e914cb1241e 100644
--- a/spec/services/error_tracking/issue_latest_event_service_spec.rb
+++ b/spec/services/error_tracking/issue_latest_event_service_spec.rb
@@ -5,12 +5,14 @@ require 'spec_helper'
RSpec.describe ErrorTracking::IssueLatestEventService do
include_context 'sentry error tracking context'
- subject { described_class.new(project, user) }
+ let(:params) { {} }
+
+ subject { described_class.new(project, user, params) }
describe '#execute' do
context 'with authorized user' do
context 'when issue_latest_event returns an error event' do
- let(:error_event) { build(:error_tracking_error_event) }
+ let(:error_event) { build(:error_tracking_sentry_error_event) }
before do
expect(error_tracking_setting)
@@ -25,6 +27,22 @@ RSpec.describe ErrorTracking::IssueLatestEventService do
include_examples 'error tracking service data not ready', :issue_latest_event
include_examples 'error tracking service sentry error handling', :issue_latest_event
include_examples 'error tracking service http status handling', :issue_latest_event
+
+ context 'integrated error tracking' do
+ let_it_be(:error) { create(:error_tracking_error, project: project) }
+ let_it_be(:event) { create(:error_tracking_error_event, error: error) }
+
+ let(:params) { { issue_id: error.id } }
+
+ before do
+ error_tracking_setting.update!(integrated: true)
+ end
+
+ it 'returns the latest event in expected format' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:latest_event].to_json).to eq(event.to_sentry_error_event.to_json)
+ end
+ end
end
include_examples 'error tracking service unauthorized user'
diff --git a/spec/services/error_tracking/issue_update_service_spec.rb b/spec/services/error_tracking/issue_update_service_spec.rb
index 9ed24038ed8..31a66654100 100644
--- a/spec/services/error_tracking/issue_update_service_spec.rb
+++ b/spec/services/error_tracking/issue_update_service_spec.rb
@@ -114,6 +114,21 @@ RSpec.describe ErrorTracking::IssueUpdateService do
end
include_examples 'error tracking service sentry error handling', :update_issue
+
+ context 'integrated error tracking' do
+ let(:error) { create(:error_tracking_error, project: project) }
+ let(:arguments) { { issue_id: error.id, status: 'resolved' } }
+ let(:update_issue_response) { { updated: true, status: :success, closed_issue_iid: nil } }
+
+ before do
+ error_tracking_setting.update!(integrated: true)
+ end
+
+ it 'resolves the error and responds with expected format' do
+ expect(update_service.execute).to eq(update_issue_response)
+ expect(error.reload.status).to eq('resolved')
+ end
+ end
end
include_examples 'error tracking service unauthorized user'
diff --git a/spec/services/error_tracking/list_issues_service_spec.rb b/spec/services/error_tracking/list_issues_service_spec.rb
index 518f2a80826..b49095ab8b9 100644
--- a/spec/services/error_tracking/list_issues_service_spec.rb
+++ b/spec/services/error_tracking/list_issues_service_spec.rb
@@ -52,6 +52,20 @@ RSpec.describe ErrorTracking::ListIssuesService do
include_examples 'error tracking service unauthorized user'
include_examples 'error tracking service disabled'
+
+ context 'integrated error tracking' do
+ let_it_be(:error) { create(:error_tracking_error, project: project) }
+
+ before do
+ error_tracking_setting.update!(integrated: true)
+ end
+
+ it 'returns the error in expected format' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:issues].size).to eq(1)
+ expect(result[:issues].first.to_json).to eq(error.to_sentry_error.to_json)
+ end
+ end
end
describe '#external_url' do
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index 19694a0a354..a93f594b360 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
end
describe 'Push Event' do
- let(:event) { Event.pushed_action.first }
+ let(:event) { Event.pushed_action.take }
subject(:execute_service) { service.execute }
@@ -134,7 +134,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
context 'when usage ping is disabled' do
before do
- stub_application_setting(usage_ping_enabled: false)
+ allow(::ServicePing::ServicePingSettings).to receive(:enabled?).and_return(false)
end
it 'does not track the event' do
@@ -171,7 +171,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
end
end
- context "with a new branch" do
+ context "with a new default branch" do
let(:oldrev) { Gitlab::Git::BLANK_SHA }
it 'generates a push event with more than one commit' do
@@ -183,12 +183,32 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
expect(event.push_event_payload).to be_an_instance_of(PushEventPayload)
expect(event.push_event_payload.commit_from).to be_nil
expect(event.push_event_payload.commit_to).to eq(newrev)
- expect(event.push_event_payload.commit_title).to eq('Initial commit')
+ expect(event.push_event_payload.commit_title).to eq('Change some files')
expect(event.push_event_payload.ref).to eq('master')
expect(event.push_event_payload.commit_count).to be > 1
end
end
+ context "with a new non-default branch" do
+ let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:branch) { 'fix' }
+ let(:commit_id) { project.commit(branch).id }
+
+ it 'generates a push event with more than one commit' do
+ execute_service
+
+ expect(event).to be_an_instance_of(PushEvent)
+ expect(event.project).to eq(project)
+ expect(event).to be_pushed_action
+ expect(event.push_event_payload).to be_an_instance_of(PushEventPayload)
+ expect(event.push_event_payload.commit_from).to be_nil
+ expect(event.push_event_payload.commit_to).to eq(newrev)
+ expect(event.push_event_payload.commit_title).to eq('Test file for directories with a leading dot')
+ expect(event.push_event_payload.ref).to eq('fix')
+ expect(event.push_event_payload.commit_count).to be > 1
+ end
+ end
+
context 'removing a branch' do
let(:newrev) { Gitlab::Git::BLANK_SHA }
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index fc629fe583d..d70e458ba5e 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -151,9 +151,9 @@ RSpec.describe Git::BranchPushService, services: true do
it "when pushing a new branch for the first time" do
expect(UpdateMergeRequestsWorker)
.to receive(:perform_async)
- .with(project.id, user.id, blankrev, 'newrev', ref)
+ .with(project.id, user.id, blankrev, newrev, ref)
- execute_service(project, user, oldrev: blankrev, newrev: 'newrev', ref: ref)
+ execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
end
end
@@ -162,13 +162,13 @@ RSpec.describe Git::BranchPushService, services: true do
it "calls the copy attributes method for the first push to the default branch" do
expect(project.repository).to receive(:copy_gitattributes).with('master')
- execute_service(project, user, oldrev: blankrev, newrev: 'newrev', ref: ref)
+ execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
end
it "calls the copy attributes method for changes to the default branch" do
expect(project.repository).to receive(:copy_gitattributes).with(ref)
- execute_service(project, user, oldrev: 'oldrev', newrev: 'newrev', ref: ref)
+ execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
end
@@ -195,7 +195,7 @@ RSpec.describe Git::BranchPushService, services: true do
it "when pushing a branch for the first time" do
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
- execute_service(project, user, oldrev: blankrev, newrev: 'newrev', ref: ref)
+ execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
expect(project.protected_branches).not_to be_empty
expect(project.protected_branches.first.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
expect(project.protected_branches.first.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
@@ -206,7 +206,7 @@ RSpec.describe Git::BranchPushService, services: true do
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
- execute_service(project, user, oldrev: blankrev, newrev: 'newrev', ref: ref)
+ execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
expect(project.protected_branches).to be_empty
end
@@ -216,7 +216,7 @@ RSpec.describe Git::BranchPushService, services: true do
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
- execute_service(project, user, oldrev: blankrev, newrev: 'newrev', ref: ref)
+ execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
expect(project.protected_branches).not_to be_empty
expect(project.protected_branches.last.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER])
@@ -231,7 +231,7 @@ RSpec.describe Git::BranchPushService, services: true do
expect(project.default_branch).to eq("master")
expect(ProtectedBranches::CreateService).not_to receive(:new)
- execute_service(project, user, oldrev: blankrev, newrev: 'newrev', ref: ref)
+ execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
expect(project.protected_branches).not_to be_empty
expect(project.protected_branches.last.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::NO_ACCESS])
@@ -243,7 +243,7 @@ RSpec.describe Git::BranchPushService, services: true do
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
- execute_service(project, user, oldrev: blankrev, newrev: 'newrev', ref: ref)
+ execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
expect(project.protected_branches).not_to be_empty
expect(project.protected_branches.first.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
expect(project.protected_branches.first.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER])
@@ -251,7 +251,7 @@ RSpec.describe Git::BranchPushService, services: true do
it "when pushing new commits to existing branch" do
expect(project).to receive(:execute_hooks)
- execute_service(project, user, oldrev: 'oldrev', newrev: 'newrev', ref: ref)
+ execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
end
end
@@ -597,7 +597,7 @@ RSpec.describe Git::BranchPushService, services: true do
let(:oldrev) { blankrev }
it 'does nothing' do
- expect(::Ci::StopEnvironmentsService).not_to receive(:new)
+ expect(::Environments::StopService).not_to receive(:new)
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
@@ -605,7 +605,7 @@ RSpec.describe Git::BranchPushService, services: true do
context 'update branch' do
it 'does nothing' do
- expect(::Ci::StopEnvironmentsService).not_to receive(:new)
+ expect(::Environments::StopService).not_to receive(:new)
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
@@ -615,10 +615,10 @@ RSpec.describe Git::BranchPushService, services: true do
let(:newrev) { blankrev }
it 'stops environments' do
- expect_next_instance_of(::Ci::StopEnvironmentsService) do |stop_service|
+ expect_next_instance_of(::Environments::StopService) do |stop_service|
expect(stop_service.project).to eq(project)
expect(stop_service.current_user).to eq(user)
- expect(stop_service).to receive(:execute).with(branch)
+ expect(stop_service).to receive(:execute_for_branch).with(branch)
end
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
diff --git a/spec/services/git/process_ref_changes_service_spec.rb b/spec/services/git/process_ref_changes_service_spec.rb
index ac9bac4e6ad..2a223091d0c 100644
--- a/spec/services/git/process_ref_changes_service_spec.rb
+++ b/spec/services/git/process_ref_changes_service_spec.rb
@@ -116,6 +116,8 @@ RSpec.describe Git::ProcessRefChangesService do
if changes_method == :tag_changes
allow_any_instance_of(Repository).to receive(:tag_exists?) { true }
end
+
+ allow(Gitlab::Git::Commit).to receive(:between) { [] }
end
context 'when git_push_create_all_pipelines is disabled' do
@@ -150,6 +152,8 @@ RSpec.describe Git::ProcessRefChangesService do
context 'with invalid .gitlab-ci.yml' do
before do
stub_ci_pipeline_yaml_file(nil)
+
+ allow(Gitlab::Git::Commit).to receive(:between) { [] }
end
it 'does not create a pipeline' do
@@ -190,6 +194,8 @@ RSpec.describe Git::ProcessRefChangesService do
allow(MergeRequests::PushedBranchesService).to receive(:new).and_return(
double(execute: %w(create1 create2)), double(execute: %w(changed1)), double(execute: %w(removed2))
)
+
+ allow(Gitlab::Git::Commit).to receive(:between).and_return([])
end
it 'schedules job for existing merge requests' do
diff --git a/spec/services/ide/terminal_config_service_spec.rb b/spec/services/ide/terminal_config_service_spec.rb
index 483b6413be3..73614f28b06 100644
--- a/spec/services/ide/terminal_config_service_spec.rb
+++ b/spec/services/ide/terminal_config_service_spec.rb
@@ -47,7 +47,6 @@ RSpec.describe Ide::TerminalConfigService do
status: :success,
terminal: {
tag_list: [],
- yaml_variables: [],
job_variables: [],
options: { script: ["sleep 60"] }
})
@@ -62,7 +61,6 @@ RSpec.describe Ide::TerminalConfigService do
status: :success,
terminal: {
tag_list: [],
- yaml_variables: [],
job_variables: [],
options: { before_script: ["ls"], script: ["sleep 60"] }
})
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 9a70de80123..b1d4877e138 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -222,7 +222,7 @@ RSpec.describe Issues::CloseService do
it 'verifies the number of queries' do
recorded = ActiveRecord::QueryRecorder.new { close_issue }
- expected_queries = 24
+ expected_queries = 25
expect(recorded.count).to be <= expected_queries
expect(recorded.cached_count).to eq(0)
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 1e922401028..29ac7df88eb 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -517,7 +517,7 @@ RSpec.describe Issues::UpdateService, :mailer do
update_issue(description: "- [x] Task 1 #{user.to_reference}\n- [ ] Task 2 #{user.to_reference}")
end
- expect(recorded.count).to eq(baseline.count - 1)
+ expect(recorded.count).to eq(baseline.count)
expect(recorded.cached_count).to eq(0)
end
end
diff --git a/spec/services/jira/requests/projects/list_service_spec.rb b/spec/services/jira/requests/projects/list_service_spec.rb
index ab15254d948..78ee9cb9698 100644
--- a/spec/services/jira/requests/projects/list_service_spec.rb
+++ b/spec/services/jira/requests/projects/list_service_spec.rb
@@ -43,20 +43,7 @@ RSpec.describe Jira::Requests::Projects::ListService do
stub_request(:get, expected_url_pattern).to_return(status: 200, body: response_body, headers: response_headers)
end
- context 'when the request to Jira returns an error' do
- before do
- expect_next(JIRA::Client).to receive(:get).and_raise(Timeout::Error)
- end
-
- it 'returns an error response' do
- expect(Gitlab::ProjectServiceLogger).to receive(:error).with(
- hash_including(
- error: hash_including(:exception_class, :exception_message, :exception_backtrace)))
- .and_call_original
- expect(subject.error?).to be_truthy
- expect(subject.message).to eq('Jira request error: Timeout::Error')
- end
- end
+ it_behaves_like 'a service that handles Jira API errors'
context 'when jira runs on a subpath' do
let(:jira_integration) { create(:jira_integration, url: 'http://jira.example.com/jira') }
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index ee5250b5b3d..15ed5c5a33f 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state, :sidekiq_inline do
- let_it_be(:source) { create(:project) }
+ let_it_be(:source, reload: true) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:member) { create(:user) }
let_it_be(:user_ids) { member.id.to_s }
@@ -89,7 +89,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
context 'when invite_source is not passed' do
let(:additional_params) { {} }
- it 'tracks the invite source as unknown' do
+ it 'raises an error' do
expect { execute_service }.to raise_error(ArgumentError, 'No invite source provided.')
expect_no_snowplow_event
@@ -126,4 +126,74 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
end
end
end
+
+ context 'when tracking the areas of focus', :snowplow do
+ context 'when areas_of_focus is not passed' do
+ it 'does not track' do
+ execute_service
+
+ expect_no_snowplow_event(category: described_class.name, action: 'area_of_focus')
+ end
+ end
+
+ context 'when 1 areas_of_focus is passed' do
+ let(:additional_params) { { invite_source: '_invite_source_', areas_of_focus: ['no_selection'] } }
+
+ it 'tracks the areas_of_focus from params' do
+ execute_service
+
+ expect_snowplow_event(
+ category: described_class.name,
+ action: 'area_of_focus',
+ label: 'no_selection',
+ property: source.members.last.id.to_s
+ )
+ end
+
+ context 'when passing many user ids' do
+ let(:another_user) { create(:user) }
+ let(:user_ids) { [member.id, another_user.id].join(',') }
+
+ it 'tracks the areas_of_focus from params' do
+ execute_service
+
+ members = source.members.last(2)
+
+ expect_snowplow_event(
+ category: described_class.name,
+ action: 'area_of_focus',
+ label: 'no_selection',
+ property: members.first.id.to_s
+ )
+ expect_snowplow_event(
+ category: described_class.name,
+ action: 'area_of_focus',
+ label: 'no_selection',
+ property: members.last.id.to_s
+ )
+ end
+ end
+ end
+
+ context 'when multiple areas_of_focus are passed' do
+ let(:additional_params) { { invite_source: '_invite_source_', areas_of_focus: %w[no_selection Other] } }
+
+ it 'tracks the areas_of_focus from params' do
+ execute_service
+
+ expect_snowplow_event(
+ category: described_class.name,
+ action: 'area_of_focus',
+ label: 'no_selection',
+ property: source.members.last.id.to_s
+ )
+ expect_snowplow_event(
+ category: described_class.name,
+ action: 'area_of_focus',
+ label: 'Other',
+ property: source.members.last.id.to_s
+ )
+ end
+ end
+ end
end
diff --git a/spec/services/members/import_project_team_service_spec.rb b/spec/services/members/import_project_team_service_spec.rb
new file mode 100644
index 00000000000..96e8db1ba73
--- /dev/null
+++ b/spec/services/members/import_project_team_service_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::ImportProjectTeamService do
+ describe '#execute' do
+ let_it_be(:source_project) { create(:project) }
+ let_it_be(:target_project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ subject { described_class.new(user, { id: target_project_id, project_id: source_project_id }) }
+
+ before_all do
+ source_project.add_guest(user)
+ target_project.add_maintainer(user)
+ end
+
+ context 'when project team members are imported successfully' do
+ let(:source_project_id) { source_project.id }
+ let(:target_project_id) { target_project.id }
+
+ it 'returns true' do
+ expect(subject.execute).to be(true)
+ end
+ end
+
+ context 'when the project team import fails' do
+ context 'when the target project cannot be found' do
+ let(:source_project_id) { source_project.id }
+ let(:target_project_id) { non_existing_record_id }
+
+ it 'returns false' do
+ expect(subject.execute).to be(false)
+ end
+ end
+
+ context 'when the source project cannot be found' do
+ let(:source_project_id) { non_existing_record_id }
+ let(:target_project_id) { target_project.id }
+
+ it 'returns false' do
+ expect(subject.execute).to be(false)
+ end
+ end
+
+ context 'when the user doing the import does not exist' do
+ let(:user) { nil }
+ let(:source_project_id) { source_project.id }
+ let(:target_project_id) { target_project.id }
+
+ it 'returns false' do
+ expect(subject.execute).to be(false)
+ end
+ end
+
+ context 'when the user does not have permission to read the source project members' do
+ let(:user) { create(:user) }
+ let(:source_project_id) { create(:project, :private).id }
+ let(:target_project_id) { target_project.id }
+
+ it 'returns false' do
+ expect(subject.execute).to be(false)
+ end
+ end
+
+ context 'when the user does not have permission to admin the target project' do
+ let(:source_project_id) { source_project.id }
+ let(:target_project_id) { create(:project).id }
+
+ it 'returns false' do
+ expect(subject.execute).to be(false)
+ end
+ end
+
+ context 'when the source and target project are valid but the ProjectTeam#import command fails' do
+ let(:source_project_id) { source_project.id }
+ let(:target_project_id) { target_project.id }
+
+ before do
+ allow_next_instance_of(ProjectTeam) do |project_team|
+ allow(project_team).to receive(:import).and_return(false)
+ end
+ end
+
+ it 'returns false' do
+ expect(subject.execute).to be(false)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/add_spent_time_service_spec.rb b/spec/services/merge_requests/add_spent_time_service_spec.rb
index db3380e9582..1e0b3e07f26 100644
--- a/spec/services/merge_requests/add_spent_time_service_spec.rb
+++ b/spec/services/merge_requests/add_spent_time_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe MergeRequests::AddSpentTimeService do
let_it_be_with_reload(:merge_request) { create(:merge_request, :simple, :unique_branches, source_project: project) }
let(:duration) { 1500 }
- let(:params) { { spend_time: { duration: duration, user_id: user.id } } }
+ let(:params) { { spend_time: { duration: duration, summary: 'summary', user_id: user.id } } }
let(:service) { described_class.new(project: project, current_user: user, params: params) }
describe '#execute' do
@@ -16,13 +16,14 @@ RSpec.describe MergeRequests::AddSpentTimeService do
project.add_developer(user)
end
- it 'creates a new timelog with the specified duration' do
+ it 'creates a new timelog with the specified duration and summary' do
expect { service.execute(merge_request) }.to change { Timelog.count }.from(0).to(1)
timelog = merge_request.timelogs.last
expect(timelog).not_to be_nil
expect(timelog.time_spent).to eq(1500)
+ expect(timelog.summary).to eq('summary')
end
it 'creates a system note with the time added' do
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index f6336a85a25..86d972bc516 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe MergeRequests::CloseService do
end
it 'clean up environments for the merge request' do
- expect_next_instance_of(Ci::StopEnvironmentsService) do |service|
+ expect_next_instance_of(::Environments::StopService) do |service|
expect(service).to receive(:execute_for_merge_request).with(merge_request)
end
diff --git a/spec/services/merge_requests/create_pipeline_service_spec.rb b/spec/services/merge_requests/create_pipeline_service_spec.rb
index a0ac168f3d7..d84ce8d15b4 100644
--- a/spec/services/merge_requests/create_pipeline_service_spec.rb
+++ b/spec/services/merge_requests/create_pipeline_service_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
describe '#execute' do
- subject { service.execute(merge_request) }
+ subject(:response) { service.execute(merge_request) }
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
@@ -39,14 +39,15 @@ RSpec.describe MergeRequests::CreatePipelineService do
let(:source_project) { project }
it 'creates a detached merge request pipeline' do
- expect { subject }.to change { Ci::Pipeline.count }.by(1)
+ expect { response }.to change { Ci::Pipeline.count }.by(1)
- expect(subject).to be_persisted
- expect(subject).to be_detached_merge_request_pipeline
+ expect(response).to be_success
+ expect(response.payload).to be_persisted
+ expect(response.payload).to be_detached_merge_request_pipeline
end
it 'defaults to merge_request_event' do
- expect(subject.source).to eq('merge_request_event')
+ expect(response.payload.source).to eq('merge_request_event')
end
context 'with fork merge request' do
@@ -58,7 +59,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
let(:actor) { user }
it 'creates a pipeline in the target project' do
- expect(subject.project).to eq(project)
+ expect(response.payload.project).to eq(project)
end
context 'when source branch is protected' do
@@ -66,7 +67,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
let!(:protected_branch) { create(:protected_branch, name: '*', project: project) }
it 'creates a pipeline in the source project' do
- expect(subject.project).to eq(source_project)
+ expect(response.payload.project).to eq(source_project)
end
end
@@ -74,7 +75,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
let!(:protected_branch) { create(:protected_branch, :developers_can_merge, name: '*', project: project) }
it 'creates a pipeline in the target project' do
- expect(subject.project).to eq(project)
+ expect(response.payload.project).to eq(project)
end
end
end
@@ -85,7 +86,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
it 'creates a pipeline in the source project' do
- expect(subject.project).to eq(source_project)
+ expect(response.payload.project).to eq(source_project)
end
end
end
@@ -99,15 +100,16 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
it 'creates a pipeline in the source project' do
- expect(subject.project).to eq(source_project)
+ expect(response.payload.project).to eq(source_project)
end
end
context 'when actor does not have permission to create pipelines' do
let(:actor) { create(:user) }
- it 'returns nothing' do
- expect(subject.full_error_messages).to include('Insufficient permissions to create a new pipeline')
+ it 'responds with error' do
+ expect(response).to be_error
+ expect(response.message).to include('Insufficient permissions to create a new pipeline')
end
end
end
@@ -139,7 +141,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
it 'does not create a pipeline' do
- expect { subject }.not_to change { Ci::Pipeline.count }
+ expect { response }.not_to change { Ci::Pipeline.count }
end
end
@@ -154,7 +156,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
it 'does not create a pipeline' do
- expect { subject }.not_to change { Ci::Pipeline.count }
+ expect { response }.not_to change { Ci::Pipeline.count }
end
end
end
@@ -170,11 +172,12 @@ RSpec.describe MergeRequests::CreatePipelineService do
}
end
- it 'creates a detached merge request pipeline' do
- expect { subject }.to change { Ci::Pipeline.count }.by(1)
+ it 'creates a detached merge request pipeline', :aggregate_failures do
+ expect { response }.to change { Ci::Pipeline.count }.by(1)
- expect(subject).to be_persisted
- expect(subject).to be_detached_merge_request_pipeline
+ expect(response).to be_success
+ expect(response.payload).to be_persisted
+ expect(response.payload).to be_detached_merge_request_pipeline
end
end
@@ -188,10 +191,25 @@ RSpec.describe MergeRequests::CreatePipelineService do
}
end
- it 'does not create a pipeline' do
- expect { subject }.not_to change { Ci::Pipeline.count }
+ it 'does not create a pipeline', :aggregate_failures do
+ expect { response }.not_to change { Ci::Pipeline.count }
+ expect(response).to be_error
end
end
end
+
+ context 'when merge request has no commits' do
+ before do
+ allow(merge_request).to receive(:has_no_commits?).and_return(true)
+ end
+
+ it 'does not create a pipeline', :aggregate_failures do
+ expect { response }.not_to change { Ci::Pipeline.count }
+
+ expect(response).to be_error
+ expect(response.message).to eq('Cannot create a pipeline for this merge request.')
+ expect(response.payload).to be_nil
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index bb764ff5672..8fc12c6c2b1 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -36,6 +36,37 @@ RSpec.describe MergeRequests::MergeToRefService do
expect(repository.ref_exists?(target_ref)).to be(true)
expect(ref_head.id).to eq(result[:commit_id])
end
+
+ context 'cache_merge_to_ref_calls flag enabled', :use_clean_rails_memory_store_caching do
+ before do
+ stub_feature_flags(cache_merge_to_ref_calls: true)
+
+ # warm the cache
+ #
+ service.execute(merge_request)
+ end
+
+ it 'caches the response', :request_store do
+ expect { 3.times { service.execute(merge_request) } }
+ .not_to change(Gitlab::GitalyClient, :get_request_count)
+ end
+ end
+
+ context 'cache_merge_to_ref_calls flag disabled', :use_clean_rails_memory_store_caching do
+ before do
+ stub_feature_flags(cache_merge_to_ref_calls: false)
+
+ # warm the cache
+ #
+ service.execute(merge_request)
+ end
+
+ it 'does not cache the response', :request_store do
+ expect(Gitlab::GitalyClient).to receive(:call).at_least(3).times.and_call_original
+
+ 3.times { service.execute(merge_request) }
+ end
+ end
end
shared_examples_for 'successfully evaluates pre-condition checks' do
diff --git a/spec/services/merge_requests/post_merge_service_spec.rb b/spec/services/merge_requests/post_merge_service_spec.rb
index 14804aa33d4..8d9a32c3e9e 100644
--- a/spec/services/merge_requests/post_merge_service_spec.rb
+++ b/spec/services/merge_requests/post_merge_service_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe MergeRequests::PostMergeService do
end
it 'clean up environments for the merge request' do
- expect_next_instance_of(Ci::StopEnvironmentsService) do |stop_environment_service|
+ expect_next_instance_of(::Environments::StopService) do |stop_environment_service|
expect(stop_environment_service).to receive(:execute_for_merge_request).with(merge_request)
end
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 5f76f6f5c44..f00a8928109 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -743,7 +743,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it 'records an error' do
service.execute
- expect(service.errors).to eq(['Branch my-branch does not exist'])
+ expect(service.errors).to eq(["Target branch #{project.full_path}:my-branch does not exist"])
end
end
diff --git a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
index 9d4fcf9ca64..58ba577b7e7 100644
--- a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
+++ b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
@@ -39,19 +39,22 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
using RSpec::Parameterized::TableSyntax
where(:track, :interval, :actions_completed) do
- :create | 1 | { created_at: frozen_time - 2.days }
- :create | 5 | { created_at: frozen_time - 6.days }
- :create | 10 | { created_at: frozen_time - 11.days }
- :verify | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days }
- :verify | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days }
- :verify | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days }
- :trial | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days }
- :trial | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days }
- :trial | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days }
- :team | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days, trial_started_at: frozen_time - 2.days }
- :team | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days, trial_started_at: frozen_time - 6.days }
- :team | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days, trial_started_at: frozen_time - 11.days }
- :experience | 30 | { created_at: frozen_time - 31.days, git_write_at: frozen_time - 31.days }
+ :create | 1 | { created_at: frozen_time - 2.days }
+ :create | 5 | { created_at: frozen_time - 6.days }
+ :create | 10 | { created_at: frozen_time - 11.days }
+ :team_short | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days }
+ :trial_short | 2 | { created_at: frozen_time - 3.days, git_write_at: frozen_time - 3.days }
+ :admin_verify | 3 | { created_at: frozen_time - 4.days, git_write_at: frozen_time - 4.days }
+ :verify | 4 | { created_at: frozen_time - 5.days, git_write_at: frozen_time - 5.days }
+ :verify | 8 | { created_at: frozen_time - 9.days, git_write_at: frozen_time - 9.days }
+ :verify | 13 | { created_at: frozen_time - 14.days, git_write_at: frozen_time - 14.days }
+ :trial | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days }
+ :trial | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days }
+ :trial | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days }
+ :team | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days, trial_started_at: frozen_time - 2.days }
+ :team | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days, trial_started_at: frozen_time - 6.days }
+ :team | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days, trial_started_at: frozen_time - 11.days }
+ :experience | 30 | { created_at: frozen_time - 31.days, git_write_at: frozen_time - 31.days }
end
with_them do
@@ -60,14 +63,14 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
end
context 'when initialized with a different track' do
- let(:track) { :verify }
+ let(:track) { :team_short }
it { is_expected.not_to send_in_product_marketing_email }
context 'when the previous track actions have been completed' do
let(:current_action_completed_at) { frozen_time - 2.days }
- it { is_expected.to send_in_product_marketing_email(user.id, group.id, :verify, 0) }
+ it { is_expected.to send_in_product_marketing_email(user.id, group.id, track, 0) }
end
end
@@ -168,7 +171,7 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
subject
expect(Notify).to have_received(:in_product_marketing_email).with(user.id, group.id, :create, 0)
- expect(Notify).to have_received(:in_product_marketing_email).with(user.id, other_group.id, :verify, 0)
+ expect(Notify).to have_received(:in_product_marketing_email).with(user.id, other_group.id, :team_short, 0)
end
end
end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 6621ad1f294..793e9ed9848 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -185,6 +185,14 @@ RSpec.describe Notes::CreateService do
expect(note.note_diff_file).to be_present
expect(note.diff_note_positions).to be_present
end
+
+ context 'when skip_capture_diff_note_position execute option is set to true' do
+ it 'does not execute Discussions::CaptureDiffNotePositionService' do
+ expect(Discussions::CaptureDiffNotePositionService).not_to receive(:new)
+
+ described_class.new(project_with_repo, user, new_opts).execute(skip_capture_diff_note_position: true)
+ end
+ end
end
context 'when DiffNote is a reply' do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index ac82e4c025f..3c4d7d50002 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe NotificationService, :mailer do
include ExternalAuthorizationServiceHelpers
include NotificationHelpers
- let_it_be(:project, reload: true) { create(:project, :public) }
+ let_it_be_with_refind(:project) { create(:project, :public) }
let_it_be_with_refind(:assignee) { create(:user) }
let(:notification) { described_class.new }
@@ -3368,10 +3368,6 @@ RSpec.describe NotificationService, :mailer do
project.add_maintainer(u_maintainer2)
project.add_developer(u_developer)
- # Mock remote update
- allow(project.repository).to receive(:async_remove_remote)
- allow(project.repository).to receive(:add_remote)
-
reset_delivered_emails!
end
diff --git a/spec/services/packages/composer/create_package_service_spec.rb b/spec/services/packages/composer/create_package_service_spec.rb
index 553d58fdd86..2ffd0a269f2 100644
--- a/spec/services/packages/composer/create_package_service_spec.rb
+++ b/spec/services/packages/composer/create_package_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Packages::Composer::CreatePackageService do
let_it_be(:package_name) { 'composer-package-name' }
let_it_be(:json) { { name: package_name }.to_json }
- let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json } ) }
+ let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json }) }
let_it_be(:user) { create(:user) }
let(:params) do
@@ -24,13 +24,30 @@ RSpec.describe Packages::Composer::CreatePackageService do
let(:created_package) { Packages::Package.composer.last }
+ shared_examples 'using the cache update worker' do
+ context 'with remove_composer_v1_cache_code enabled' do
+ it 'does not enqueue a cache update job' do
+ expect(::Packages::Composer::CacheUpdateWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'with remove_composer_v1_cache_code disabled' do
+ it 'enqueues a cache update job' do
+ stub_feature_flags(remove_composer_v1_cache_code: true)
+ expect(::Packages::Composer::CacheUpdateWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+ end
+
context 'without an existing package' do
context 'with a branch' do
let(:branch) { project.repository.find_branch('master') }
it 'creates the package' do
- expect(::Packages::Composer::CacheUpdateWorker).to receive(:perform_async).with(project.id, package_name, nil)
-
expect { subject }
.to change { Packages::Package.composer.count }.by(1)
.and change { Packages::Composer::Metadatum.count }.by(1)
@@ -47,6 +64,7 @@ RSpec.describe Packages::Composer::CreatePackageService do
it_behaves_like 'assigns build to package'
it_behaves_like 'assigns status to package'
+ it_behaves_like 'using the cache update worker'
end
context 'with a tag' do
@@ -57,8 +75,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
end
it 'creates the package' do
- expect(::Packages::Composer::CacheUpdateWorker).to receive(:perform_async).with(project.id, package_name, nil)
-
expect { subject }
.to change { Packages::Package.composer.count }.by(1)
.and change { Packages::Composer::Metadatum.count }.by(1)
@@ -73,6 +89,7 @@ RSpec.describe Packages::Composer::CreatePackageService do
it_behaves_like 'assigns build to package'
it_behaves_like 'assigns status to package'
+ it_behaves_like 'using the cache update worker'
end
end
@@ -85,12 +102,12 @@ RSpec.describe Packages::Composer::CreatePackageService do
end
it 'does not create a new package' do
- expect(::Packages::Composer::CacheUpdateWorker).to receive(:perform_async).with(project.id, package_name, nil)
-
expect { subject }
.to change { Packages::Package.composer.count }.by(0)
.and change { Packages::Composer::Metadatum.count }.by(0)
end
+
+ it_behaves_like 'using the cache update worker'
end
context 'belonging to another project' do
@@ -108,12 +125,12 @@ RSpec.describe Packages::Composer::CreatePackageService do
let!(:other_package) { create(:package, name: package_name, version: 'dev-master', project: other_project) }
it 'creates the package' do
- expect(::Packages::Composer::CacheUpdateWorker).to receive(:perform_async).with(project.id, package_name, nil)
-
expect { subject }
.to change { Packages::Package.composer.count }.by(1)
.and change { Packages::Composer::Metadatum.count }.by(1)
end
+
+ it_behaves_like 'using the cache update worker'
end
end
end
diff --git a/spec/services/packages/create_dependency_service_spec.rb b/spec/services/packages/create_dependency_service_spec.rb
index 3eae9f099f7..261c6b395d5 100644
--- a/spec/services/packages/create_dependency_service_spec.rb
+++ b/spec/services/packages/create_dependency_service_spec.rb
@@ -58,8 +58,8 @@ RSpec.describe Packages::CreateDependencyService do
let_it_be(:rows) { [{ name: 'express', version_pattern: '^4.16.4' }] }
it 'creates dependences and links' do
- original_bulk_insert = ::Gitlab::Database.method(:bulk_insert)
- expect(::Gitlab::Database)
+ original_bulk_insert = ::Gitlab::Database.main.method(:bulk_insert)
+ expect(::Gitlab::Database.main)
.to receive(:bulk_insert) do |table, rows, return_ids: false, disable_quote: [], on_conflict: nil|
call_count = table == Packages::Dependency.table_name ? 2 : 1
call_count.times { original_bulk_insert.call(table, rows, return_ids: return_ids, disable_quote: disable_quote, on_conflict: on_conflict) }
diff --git a/spec/services/packages/debian/generate_distribution_key_service_spec.rb b/spec/services/packages/debian/generate_distribution_key_service_spec.rb
index b31830c2d3b..f82d577f071 100644
--- a/spec/services/packages/debian/generate_distribution_key_service_spec.rb
+++ b/spec/services/packages/debian/generate_distribution_key_service_spec.rb
@@ -3,33 +3,21 @@
require 'spec_helper'
RSpec.describe Packages::Debian::GenerateDistributionKeyService do
- let_it_be(:user) { create(:user) }
-
let(:params) { {} }
- subject { described_class.new(current_user: user, params: params) }
+ subject { described_class.new(params: params) }
let(:response) { subject.execute }
- context 'with a user' do
- it 'returns an Hash', :aggregate_failures do
- expect(GPGME::Ctx).to receive(:new).with(armor: true, offline: true).and_call_original
- expect(User).to receive(:random_password).with(no_args).and_call_original
-
- expect(response).to be_a Hash
- expect(response.keys).to contain_exactly(:private_key, :public_key, :fingerprint, :passphrase)
- expect(response[:private_key]).to start_with('-----BEGIN PGP PRIVATE KEY BLOCK-----')
- expect(response[:public_key]).to start_with('-----BEGIN PGP PUBLIC KEY BLOCK-----')
- expect(response[:fingerprint].length).to eq(40)
- expect(response[:passphrase].length).to be > 10
- end
- end
-
- context 'without a user' do
- let(:user) { nil }
+ it 'returns a Hash', :aggregate_failures do
+ expect(GPGME::Ctx).to receive(:new).with(armor: true, offline: true).and_call_original
+ expect(User).to receive(:random_password).with(no_args).and_call_original
- it 'raises an ArgumentError' do
- expect { response }.to raise_error(ArgumentError, 'Please provide a user')
- end
+ expect(response).to be_a Hash
+ expect(response.keys).to contain_exactly(:private_key, :public_key, :fingerprint, :passphrase)
+ expect(response[:private_key]).to start_with('-----BEGIN PGP PRIVATE KEY BLOCK-----')
+ expect(response[:public_key]).to start_with('-----BEGIN PGP PUBLIC KEY BLOCK-----')
+ expect(response[:fingerprint].length).to eq(40)
+ expect(response[:passphrase].length).to be > 10
end
end
diff --git a/spec/services/packages/debian/generate_distribution_service_spec.rb b/spec/services/packages/debian/generate_distribution_service_spec.rb
index a162e492e7e..53805d03655 100644
--- a/spec/services/packages/debian/generate_distribution_service_spec.rb
+++ b/spec/services/packages/debian/generate_distribution_service_spec.rb
@@ -6,19 +6,16 @@ RSpec.describe Packages::Debian::GenerateDistributionService do
describe '#execute' do
subject { described_class.new(distribution).execute }
+ let(:subject2) { described_class.new(distribution).execute }
+ let(:subject3) { described_class.new(distribution).execute }
+
include_context 'with published Debian package'
[:project, :group].each do |container_type|
context "for #{container_type}" do
include_context 'with Debian distribution', container_type
- context 'with Debian components and architectures' do
- it_behaves_like 'Generate Debian Distribution and component files'
- end
-
- context 'without components and architectures' do
- it_behaves_like 'Generate minimal Debian Distribution'
- end
+ it_behaves_like 'Generate Debian Distribution and component files'
end
end
end
diff --git a/spec/services/packages/debian/sign_distribution_service_spec.rb b/spec/services/packages/debian/sign_distribution_service_spec.rb
new file mode 100644
index 00000000000..2aec0e50636
--- /dev/null
+++ b/spec/services/packages/debian/sign_distribution_service_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::SignDistributionService do
+ let_it_be(:group) { create(:group, :public) }
+
+ let(:content) { FFaker::Lorem.paragraph }
+ let(:service) { described_class.new(distribution, content, detach: detach) }
+
+ shared_examples 'Sign Distribution' do |container_type, detach: false|
+ context "for #{container_type} detach=#{detach}" do
+ let(:detach) { detach }
+
+ if container_type == :group
+ let_it_be(:distribution) { create('debian_group_distribution', container: group) }
+ else
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:distribution) { create('debian_project_distribution', container: project) }
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'without an existing key' do
+ it 'raises ArgumentError', :aggregate_failures do
+ expect { subject }
+ .to raise_error(ArgumentError, 'distribution key is missing')
+ end
+ end
+
+ context 'with an existing key' do
+ let!(:key) { create("debian_#{container_type}_distribution_key", distribution: distribution) }
+
+ it 'returns the content signed', :aggregate_failures do
+ expect(Packages::Debian::GenerateDistributionKeyService).not_to receive(:new)
+
+ key_class = "Packages::Debian::#{container_type.capitalize}DistributionKey".constantize
+
+ expect { subject }
+ .to not_change { key_class.count }
+
+ if detach
+ expect(subject).to start_with("-----BEGIN PGP SIGNATURE-----\n")
+ else
+ expect(subject).to start_with("-----BEGIN PGP SIGNED MESSAGE-----\nHash: SHA256\n\n#{content}\n-----BEGIN PGP SIGNATURE-----\n")
+ end
+
+ expect(subject).to end_with("\n-----END PGP SIGNATURE-----\n")
+ end
+ end
+ end
+ end
+ end
+
+ it_behaves_like 'Sign Distribution', :project
+ it_behaves_like 'Sign Distribution', :project, detach: true
+ it_behaves_like 'Sign Distribution', :group
+ it_behaves_like 'Sign Distribution', :group, detach: true
+end
diff --git a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
index 328484c3e5a..66ff6a8d03f 100644
--- a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
+++ b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
allow(service).to receive(:metadata).and_return(metadata)
end
- it 'does not update the package' do
+ it 'does not update the package', :aggregate_failures do
expect(service).to receive(:try_obtain_lease).and_call_original
expect { subject }
@@ -63,213 +63,234 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
end
end
- context 'with no existing package' do
- let(:package_id) { package.id }
-
- it 'updates package and package file' do
- expect { subject }
- .to change { ::Packages::Package.count }.by(1)
- .and change { Packages::Dependency.count }.by(1)
- .and change { Packages::DependencyLink.count }.by(1)
- .and change { ::Packages::Nuget::Metadatum.count }.by(0)
-
- expect(package.reload.name).to eq(package_name)
- expect(package.version).to eq(package_version)
- expect(package).to be_default
- expect(package_file.reload.file_name).to eq(package_file_name)
- # hard reset needed to properly reload package_file.file
- expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
- end
-
- it_behaves_like 'taking the lease'
+ shared_examples 'handling all conditions' do
+ context 'with no existing package' do
+ let(:package_id) { package.id }
- it_behaves_like 'not updating the package if the lease is taken'
- end
+ it 'updates package and package file', :aggregate_failures do
+ expect { subject }
+ .to not_change { ::Packages::Package.count }
+ .and change { Packages::Dependency.count }.by(1)
+ .and change { Packages::DependencyLink.count }.by(1)
+ .and change { ::Packages::Nuget::Metadatum.count }.by(0)
- context 'with existing package' do
- let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
- let(:package_id) { existing_package.id }
+ expect(package.reload.name).to eq(package_name)
+ expect(package.version).to eq(package_version)
+ expect(package).to be_default
+ expect(package_file.reload.file_name).to eq(package_file_name)
+ # hard reset needed to properly reload package_file.file
+ expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
+ end
- it 'link existing package and updates package file' do
- expect(service).to receive(:try_obtain_lease).and_call_original
+ it_behaves_like 'taking the lease'
- expect { subject }
- .to change { ::Packages::Package.count }.by(-1)
- .and change { Packages::Dependency.count }.by(0)
- .and change { Packages::DependencyLink.count }.by(0)
- .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
- .and change { ::Packages::Nuget::Metadatum.count }.by(0)
- expect(package_file.reload.file_name).to eq(package_file_name)
- expect(package_file.package).to eq(existing_package)
+ it_behaves_like 'not updating the package if the lease is taken'
end
- it_behaves_like 'taking the lease'
-
- it_behaves_like 'not updating the package if the lease is taken'
- end
+ context 'with existing package' do
+ let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
+ let(:package_id) { existing_package.id }
- context 'with a nuspec file with metadata' do
- let(:nuspec_filepath) { 'packages/nuget/with_metadata.nuspec' }
- let(:expected_tags) { %w(foo bar test tag1 tag2 tag3 tag4 tag5) }
+ it 'link existing package and updates package file', :aggregate_failures do
+ expect(service).to receive(:try_obtain_lease).and_call_original
- before do
- allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
- allow(service)
- .to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
+ expect { subject }
+ .to change { ::Packages::Package.count }.by(-1)
+ .and change { Packages::Dependency.count }.by(0)
+ .and change { Packages::DependencyLink.count }.by(0)
+ .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
+ .and change { ::Packages::Nuget::Metadatum.count }.by(0)
+ expect(package_file.reload.file_name).to eq(package_file_name)
+ expect(package_file.package).to eq(existing_package)
end
- end
- it 'creates tags' do
- expect(service).to receive(:try_obtain_lease).and_call_original
- expect { subject }.to change { ::Packages::Tag.count }.by(8)
- expect(package.reload.tags.map(&:name)).to contain_exactly(*expected_tags)
+ it_behaves_like 'taking the lease'
+
+ it_behaves_like 'not updating the package if the lease is taken'
end
- context 'with existing package and tags' do
- let!(:existing_package) { create(:nuget_package, project: package.project, name: 'DummyProject.WithMetadata', version: '1.2.3') }
- let!(:tag1) { create(:packages_tag, package: existing_package, name: 'tag1') }
- let!(:tag2) { create(:packages_tag, package: existing_package, name: 'tag2') }
- let!(:tag3) { create(:packages_tag, package: existing_package, name: 'tag_not_in_metadata') }
+ context 'with a nuspec file with metadata' do
+ let(:nuspec_filepath) { 'packages/nuget/with_metadata.nuspec' }
+ let(:expected_tags) { %w(foo bar test tag1 tag2 tag3 tag4 tag5) }
+
+ before do
+ allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
+ allow(service)
+ .to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
+ end
+ end
- it 'creates tags and deletes those not in metadata' do
+ it 'creates tags' do
expect(service).to receive(:try_obtain_lease).and_call_original
- expect { subject }.to change { ::Packages::Tag.count }.by(5)
- expect(existing_package.tags.map(&:name)).to contain_exactly(*expected_tags)
+ expect { subject }.to change { ::Packages::Tag.count }.by(8)
+ expect(package.reload.tags.map(&:name)).to contain_exactly(*expected_tags)
end
- end
- it 'creates nuget metadatum' do
- expect { subject }
- .to change { ::Packages::Package.count }.by(1)
- .and change { ::Packages::Nuget::Metadatum.count }.by(1)
+ context 'with existing package and tags' do
+ let!(:existing_package) { create(:nuget_package, project: package.project, name: 'DummyProject.WithMetadata', version: '1.2.3') }
+ let!(:tag1) { create(:packages_tag, package: existing_package, name: 'tag1') }
+ let!(:tag2) { create(:packages_tag, package: existing_package, name: 'tag2') }
+ let!(:tag3) { create(:packages_tag, package: existing_package, name: 'tag_not_in_metadata') }
+
+ it 'creates tags and deletes those not in metadata' do
+ expect(service).to receive(:try_obtain_lease).and_call_original
+ expect { subject }.to change { ::Packages::Tag.count }.by(5)
+ expect(existing_package.tags.map(&:name)).to contain_exactly(*expected_tags)
+ end
+ end
- metadatum = package_file.reload.package.nuget_metadatum
- expect(metadatum.license_url).to eq('https://opensource.org/licenses/MIT')
- expect(metadatum.project_url).to eq('https://gitlab.com/gitlab-org/gitlab')
- expect(metadatum.icon_url).to eq('https://opensource.org/files/osi_keyhole_300X300_90ppi_0.png')
- end
+ it 'creates nuget metadatum', :aggregate_failures do
+ expect { subject }
+ .to not_change { ::Packages::Package.count }
+ .and change { ::Packages::Nuget::Metadatum.count }.by(1)
- context 'with too long url' do
- let_it_be(:too_long_url) { "http://localhost/#{'bananas' * 50}" }
+ metadatum = package_file.reload.package.nuget_metadatum
+ expect(metadatum.license_url).to eq('https://opensource.org/licenses/MIT')
+ expect(metadatum.project_url).to eq('https://gitlab.com/gitlab-org/gitlab')
+ expect(metadatum.icon_url).to eq('https://opensource.org/files/osi_keyhole_300X300_90ppi_0.png')
+ end
- let(:metadata) { { package_name: package_name, package_version: package_version, license_url: too_long_url } }
+ context 'with too long url' do
+ let_it_be(:too_long_url) { "http://localhost/#{'bananas' * 50}" }
- before do
- allow(service).to receive(:metadata).and_return(metadata)
- end
+ let(:metadata) { { package_name: package_name, package_version: package_version, license_url: too_long_url } }
+
+ before do
+ allow(service).to receive(:metadata).and_return(metadata)
+ end
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ end
end
- end
- context 'with nuspec file with dependencies' do
- let(:nuspec_filepath) { 'packages/nuget/with_dependencies.nuspec' }
- let(:package_name) { 'Test.Package' }
- let(:package_version) { '3.5.2' }
- let(:package_file_name) { 'test.package.3.5.2.nupkg' }
+ context 'with nuspec file with dependencies' do
+ let(:nuspec_filepath) { 'packages/nuget/with_dependencies.nuspec' }
+ let(:package_name) { 'Test.Package' }
+ let(:package_version) { '3.5.2' }
+ let(:package_file_name) { 'test.package.3.5.2.nupkg' }
- before do
- allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
- allow(service)
- .to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
+ before do
+ allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
+ allow(service)
+ .to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
+ end
end
- end
- it 'updates package and package file' do
- expect { subject }
- .to change { ::Packages::Package.count }.by(1)
- .and change { Packages::Dependency.count }.by(4)
- .and change { Packages::DependencyLink.count }.by(4)
- .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(2)
-
- expect(package.reload.name).to eq(package_name)
- expect(package.version).to eq(package_version)
- expect(package).to be_default
- expect(package_file.reload.file_name).to eq(package_file_name)
- # hard reset needed to properly reload package_file.file
- expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
+ it 'updates package and package file', :aggregate_failures do
+ expect { subject }
+ .to not_change { ::Packages::Package.count }
+ .and change { Packages::Dependency.count }.by(4)
+ .and change { Packages::DependencyLink.count }.by(4)
+ .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(2)
+
+ expect(package.reload.name).to eq(package_name)
+ expect(package.version).to eq(package_version)
+ expect(package).to be_default
+ expect(package_file.reload.file_name).to eq(package_file_name)
+ # hard reset needed to properly reload package_file.file
+ expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
+ end
end
- end
- context 'with package file not containing a nuspec file' do
- before do
- allow_next_instance_of(Zip::File) do |file|
- allow(file).to receive(:glob).and_return([])
+ context 'with package file not containing a nuspec file' do
+ before do
+ allow_next_instance_of(Zip::File) do |file|
+ allow(file).to receive(:glob).and_return([])
+ end
end
+
+ it_behaves_like 'raising an', ::Packages::Nuget::MetadataExtractionService::ExtractionError
end
- it_behaves_like 'raising an', ::Packages::Nuget::MetadataExtractionService::ExtractionError
- end
+ context 'with a symbol package' do
+ let(:package_file) { package.package_files.last }
+ let(:package_file_name) { 'dummyproject.dummypackage.1.0.0.snupkg' }
- context 'with a symbol package' do
- let(:package_file) { package.package_files.last }
- let(:package_file_name) { 'dummyproject.dummypackage.1.0.0.snupkg' }
+ context 'with no existing package' do
+ let(:package_id) { package.id }
- context 'with no existing package' do
- let(:package_id) { package.id }
+ it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ end
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
- end
+ context 'with existing package' do
+ let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
+ let(:package_id) { existing_package.id }
- context 'with existing package' do
- let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
- let(:package_id) { existing_package.id }
+ it 'link existing package and updates package file', :aggregate_failures do
+ expect(service).to receive(:try_obtain_lease).and_call_original
+ expect(::Packages::Nuget::SyncMetadatumService).not_to receive(:new)
+ expect(::Packages::UpdateTagsService).not_to receive(:new)
- it 'link existing package and updates package file', :aggregate_failures do
- expect(service).to receive(:try_obtain_lease).and_call_original
- expect(::Packages::Nuget::SyncMetadatumService).not_to receive(:new)
- expect(::Packages::UpdateTagsService).not_to receive(:new)
-
- expect { subject }
- .to change { ::Packages::Package.count }.by(-1)
- .and change { Packages::Dependency.count }.by(0)
- .and change { Packages::DependencyLink.count }.by(0)
- .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
- .and change { ::Packages::Nuget::Metadatum.count }.by(0)
- expect(package_file.reload.file_name).to eq(package_file_name)
- expect(package_file.package).to eq(existing_package)
- end
+ expect { subject }
+ .to change { ::Packages::Package.count }.by(-1)
+ .and change { Packages::Dependency.count }.by(0)
+ .and change { Packages::DependencyLink.count }.by(0)
+ .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
+ .and change { ::Packages::Nuget::Metadatum.count }.by(0)
+ expect(package_file.reload.file_name).to eq(package_file_name)
+ expect(package_file.package).to eq(existing_package)
+ end
- it_behaves_like 'taking the lease'
+ it_behaves_like 'taking the lease'
- it_behaves_like 'not updating the package if the lease is taken'
+ it_behaves_like 'not updating the package if the lease is taken'
+ end
end
- end
- context 'with an invalid package name' do
- invalid_names = [
- '',
- 'My/package',
- '../../../my_package',
- '%2e%2e%2fmy_package'
- ]
+ context 'with an invalid package name' do
+ invalid_names = [
+ '',
+ 'My/package',
+ '../../../my_package',
+ '%2e%2e%2fmy_package'
+ ]
- invalid_names.each do |invalid_name|
- before do
- allow(service).to receive(:package_name).and_return(invalid_name)
+ invalid_names.each do |invalid_name|
+ before do
+ allow(service).to receive(:package_name).and_return(invalid_name)
+ end
+
+ it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
end
+ end
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ context 'with an invalid package version' do
+ invalid_versions = [
+ '',
+ '555',
+ '1.2',
+ '1./2.3',
+ '../../../../../1.2.3',
+ '%2e%2e%2f1.2.3'
+ ]
+
+ invalid_versions.each do |invalid_version|
+ before do
+ allow(service).to receive(:package_version).and_return(invalid_version)
+ end
+
+ it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ end
end
end
- context 'with an invalid package version' do
- invalid_versions = [
- '',
- '555',
- '1.2',
- '1./2.3',
- '../../../../../1.2.3',
- '%2e%2e%2f1.2.3'
- ]
-
- invalid_versions.each do |invalid_version|
- before do
- allow(service).to receive(:package_version).and_return(invalid_version)
- end
+ context 'with packages_nuget_new_package_file_updater enabled' do
+ before do
+ expect(service).not_to receive(:legacy_execute)
+ end
+
+ it_behaves_like 'handling all conditions'
+ end
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ context 'with packages_nuget_new_package_file_updater disabled' do
+ before do
+ stub_feature_flags(packages_nuget_new_package_file_updater: false)
+ expect(::Packages::UpdatePackageFileService)
+ .not_to receive(:new).with(package_file, instance_of(Hash)).and_call_original
+ expect(service).not_to receive(:new_execute)
end
+
+ it_behaves_like 'handling all conditions'
end
end
end
diff --git a/spec/services/packages/update_package_file_service_spec.rb b/spec/services/packages/update_package_file_service_spec.rb
new file mode 100644
index 00000000000..d988049c43a
--- /dev/null
+++ b/spec/services/packages/update_package_file_service_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::UpdatePackageFileService do
+ let_it_be(:another_package) { create(:package) }
+ let_it_be(:old_file_name) { 'old_file_name.txt' }
+ let_it_be(:new_file_name) { 'new_file_name.txt' }
+
+ let(:package) { package_file.package }
+ let(:params) { { package_id: another_package.id, file_name: new_file_name } }
+ let(:service) { described_class.new(package_file, params) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ shared_examples 'updating package file with valid parameters' do
+ context 'with both parameters set' do
+ it 'updates the package file accordingly' do
+ expect { subject }
+ .to change { package.package_files.count }.from(1).to(0)
+ .and change { another_package.package_files.count }.from(0).to(1)
+ .and change { package_file.package_id }.from(package.id).to(another_package.id)
+ .and change { package_file.file_name }.from(old_file_name).to(new_file_name)
+ end
+ end
+
+ context 'with only file_name set' do
+ let(:params) { { file_name: new_file_name } }
+
+ it 'updates the package file accordingly' do
+ expect { subject }
+ .to not_change { package.package_files.count }
+ .and not_change { another_package.package_files.count }
+ .and not_change { package_file.package_id }
+ .and change { package_file.file_name }.from(old_file_name).to(new_file_name)
+ end
+ end
+
+ context 'with only package_id set' do
+ let(:params) { { package_id: another_package.id } }
+
+ it 'updates the package file accordingly' do
+ expect { subject }
+ .to change { package.package_files.count }.from(1).to(0)
+ .and change { another_package.package_files.count }.from(0).to(1)
+ .and change { package_file.package_id }.from(package.id).to(another_package.id)
+ .and not_change { package_file.file_name }
+ end
+ end
+ end
+
+ shared_examples 'not updating package with invalid parameters' do
+ context 'with blank parameters' do
+ let(:params) { {} }
+
+ it 'raise an argument error' do
+ expect { subject }.to raise_error(ArgumentError, 'package_id and file_name are blank')
+ end
+ end
+
+ context 'with non persisted package file' do
+ let(:package_file) { build(:package_file) }
+
+ it 'raise an argument error' do
+ expect { subject }.to raise_error(ArgumentError, 'package_file not persisted')
+ end
+ end
+ end
+
+ context 'with object storage disabled' do
+ let(:package_file) { create(:package_file, file_name: old_file_name) }
+
+ before do
+ stub_package_file_object_storage(enabled: false)
+ end
+
+ it_behaves_like 'updating package file with valid parameters' do
+ before do
+ expect(package_file).to receive(:remove_previously_stored_file).and_call_original
+ expect(package_file).not_to receive(:move_in_object_storage)
+ end
+ end
+
+ it_behaves_like 'not updating package with invalid parameters'
+ end
+
+ context 'with object storage enabled' do
+ let(:package_file) do
+ create(
+ :package_file,
+ file_name: old_file_name,
+ file: CarrierWaveStringFile.new_file(
+ file_content: 'content',
+ filename: old_file_name,
+ content_type: 'text/plain'
+ ),
+ file_store: ::Packages::PackageFileUploader::Store::REMOTE
+ )
+ end
+
+ before do
+ stub_package_file_object_storage(enabled: true)
+ end
+
+ it_behaves_like 'updating package file with valid parameters' do
+ before do
+ expect(package_file).not_to receive(:remove_previously_stored_file)
+ expect(package_file).to receive(:move_in_object_storage).and_call_original
+ end
+ end
+
+ it_behaves_like 'not updating package with invalid parameters' do
+ before do
+ expect(package_file.file.file).not_to receive(:copy_to)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/update_tags_service_spec.rb b/spec/services/packages/update_tags_service_spec.rb
index 4a122d1c718..6e67489fec9 100644
--- a/spec/services/packages/update_tags_service_spec.rb
+++ b/spec/services/packages/update_tags_service_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Packages::UpdateTagsService do
it 'is a no op' do
expect(package).not_to receive(:tags)
- expect(::Gitlab::Database).not_to receive(:bulk_insert)
+ expect(::Gitlab::Database.main).not_to receive(:bulk_insert)
subject
end
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 871ed95bf28..3f4d37e5ddc 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -131,6 +131,12 @@ RSpec.describe PostReceiveService do
project.add_developer(user)
end
+ it 'invalidates the branch name cache' do
+ expect(service.repository).to receive(:expire_branches_cache).and_call_original
+
+ subject
+ end
+
it 'invokes MergeRequests::PushOptionsHandlerService' do
expect(MergeRequests::PushOptionsHandlerService).to receive(:new).and_call_original
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index defeadb479a..c3928563125 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -135,7 +135,7 @@ RSpec.describe Projects::CreateService, '#execute' do
end
it_behaves_like 'storing arguments in the application context' do
- let(:expected_params) { { project: subject.full_path, related_class: described_class.to_s } }
+ let(:expected_params) { { project: subject.full_path } }
subject { create_project(user, opts) }
end
@@ -335,7 +335,7 @@ RSpec.describe Projects::CreateService, '#execute' do
it 'does not write repository config' do
expect_next_instance_of(Project) do |project|
- expect(project).not_to receive(:write_repository_config)
+ expect(project).not_to receive(:set_full_path)
end
imported_project
@@ -607,65 +607,55 @@ RSpec.describe Projects::CreateService, '#execute' do
describe 'create integration for the project' do
subject(:project) { create_project(user, opts) }
- context 'with an active integration template' do
- let!(:template_integration) { create(:prometheus_integration, :template, api_url: 'https://prometheus.template.com/') }
+ context 'with an active instance-level integration' do
+ let!(:instance_integration) { create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/') }
- it 'creates an integration from the template' do
+ it 'creates an integration from the instance-level integration' do
expect(project.integrations.count).to eq(1)
- expect(project.integrations.first.api_url).to eq(template_integration.api_url)
- expect(project.integrations.first.inherit_from_id).to be_nil
+ expect(project.integrations.first.api_url).to eq(instance_integration.api_url)
+ expect(project.integrations.first.inherit_from_id).to eq(instance_integration.id)
end
- context 'with an active instance-level integration' do
- let!(:instance_integration) { create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/') }
+ context 'with an active group-level integration' do
+ let!(:group_integration) { create(:prometheus_integration, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ let!(:group) do
+ create(:group).tap do |group|
+ group.add_owner(user)
+ end
+ end
- it 'creates an integration from the instance-level integration' do
+ let(:opts) do
+ {
+ name: 'GitLab',
+ namespace_id: group.id
+ }
+ end
+
+ it 'creates an integration from the group-level integration' do
expect(project.integrations.count).to eq(1)
- expect(project.integrations.first.api_url).to eq(instance_integration.api_url)
- expect(project.integrations.first.inherit_from_id).to eq(instance_integration.id)
+ expect(project.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(project.integrations.first.inherit_from_id).to eq(group_integration.id)
end
- context 'with an active group-level integration' do
- let!(:group_integration) { create(:prometheus_integration, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
- let!(:group) do
- create(:group).tap do |group|
- group.add_owner(user)
+ context 'with an active subgroup' do
+ let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup) do
+ create(:group, parent: group).tap do |subgroup|
+ subgroup.add_owner(user)
end
end
let(:opts) do
{
name: 'GitLab',
- namespace_id: group.id
+ namespace_id: subgroup.id
}
end
- it 'creates an integration from the group-level integration' do
+ it 'creates an integration from the subgroup-level integration' do
expect(project.integrations.count).to eq(1)
- expect(project.integrations.first.api_url).to eq(group_integration.api_url)
- expect(project.integrations.first.inherit_from_id).to eq(group_integration.id)
- end
-
- context 'with an active subgroup' do
- let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
- let!(:subgroup) do
- create(:group, parent: group).tap do |subgroup|
- subgroup.add_owner(user)
- end
- end
-
- let(:opts) do
- {
- name: 'GitLab',
- namespace_id: subgroup.id
- }
- end
-
- it 'creates an integration from the subgroup-level integration' do
- expect(project.integrations.count).to eq(1)
- expect(project.integrations.first.api_url).to eq(subgroup_integration.api_url)
- expect(project.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
- end
+ expect(project.integrations.first.api_url).to eq(subgroup_integration.api_url)
+ expect(project.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
end
end
end
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
index 47252bcf7a7..d0064873972 100644
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -96,13 +96,13 @@ RSpec.describe Projects::HashedStorage::MigrateRepositoryService do
end
it 'handles Gitlab::Git::CommandError' do
- expect(project).to receive(:write_repository_config).and_raise(Gitlab::Git::CommandError)
+ expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
expect { service.execute }.not_to raise_exception
end
it 'ensures rollback when Gitlab::Git::CommandError' do
- expect(project).to receive(:write_repository_config).and_raise(Gitlab::Git::CommandError)
+ expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
expect(service).to receive(:rollback_folder_move).and_call_original
service.execute
diff --git a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
index af128a532b9..23e776b72bc 100644
--- a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
@@ -96,13 +96,13 @@ RSpec.describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab
end
it 'handles Gitlab::Git::CommandError' do
- expect(project).to receive(:write_repository_config).and_raise(Gitlab::Git::CommandError)
+ expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
expect { service.execute }.not_to raise_exception
end
it 'ensures rollback when Gitlab::Git::CommandError' do
- expect(project).to receive(:write_repository_config).and_raise(Gitlab::Git::CommandError)
+ expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
expect(service).to receive(:rollback_folder_move).and_call_original
service.execute
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index f27ebb2e19e..f9ff959fa05 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -90,6 +90,21 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
expect(File.binread(LfsObject.first.file.file.file)).to eq lfs_content
end
+
+ it 'streams the download' do
+ expected_options = { headers: anything, stream_body: true }
+
+ expect(Gitlab::HTTP).to receive(:perform_request).with(Net::HTTP::Get, anything, expected_options)
+
+ subject.execute
+ end
+
+ it 'skips read_total_timeout', :aggregate_failures do
+ stub_const('Gitlab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT', 0)
+
+ expect(Gitlab::Metrics::System).not_to receive(:monotonic_time)
+ expect(subject.execute).to include(status: :success)
+ end
end
context 'when file download fails' do
diff --git a/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb
index 0799a33f856..981d7027a17 100644
--- a/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb
@@ -34,10 +34,24 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
subject.execute
end
- it 'retrieves the download links of non existent objects' do
- expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with(all_oids)
+ context 'when no LFS objects exist' do
+ before do
+ project.lfs_objects.delete_all
+ end
- subject.execute
+ it 'retrieves all LFS objects' do
+ expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with(all_oids)
+
+ subject.execute
+ end
+ end
+
+ context 'when some LFS objects already exist' do
+ it 'retrieves the download links of non-existent objects' do
+ expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with(oids)
+
+ subject.execute
+ end
end
end
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index f91f879b772..1d9d5f6e938 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -262,6 +262,31 @@ RSpec.describe Projects::Operations::UpdateService do
expect(project.error_tracking_setting.previous_changes.keys)
.to contain_exactly('enabled')
end
+
+ context 'with integrated attribute' do
+ let(:params) do
+ {
+ error_tracking_setting_attributes: {
+ enabled: true,
+ integrated: true
+ }
+ }
+ end
+
+ it 'updates integrated attribute' do
+ expect { result }
+ .to change { project.reload.error_tracking_setting.integrated }
+ .from(false)
+ .to(true)
+ end
+
+ it 'only updates enabled and integrated attributes' do
+ result
+
+ expect(project.error_tracking_setting.previous_changes.keys)
+ .to contain_exactly('enabled', 'integrated')
+ end
+ end
end
context 'without setting' do
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 5898504b463..0f21736eda0 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -4,6 +4,8 @@ require "spec_helper"
RSpec.describe Projects::UpdatePagesService do
let_it_be(:project, refind: true) { create(:project, :repository) }
+
+ let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
@@ -94,6 +96,7 @@ RSpec.describe Projects::UpdatePagesService do
expect(deployment.file_count).to eq(3)
expect(deployment.file_sha256).to eq(artifacts_archive.file_sha256)
expect(project.pages_metadatum.reload.pages_deployment_id).to eq(deployment.id)
+ expect(deployment.ci_build_id).to eq(build.id)
end
it 'fails if another deployment is in progress' do
@@ -106,19 +109,6 @@ RSpec.describe Projects::UpdatePagesService do
end
end
- it 'fails if sha on branch was updated before deployment was uploaded' do
- expect(subject).to receive(:create_pages_deployment).and_wrap_original do |m, *args|
- build.update!(ref: 'feature')
- m.call(*args)
- end
-
- expect(execute).not_to eq(:success)
- expect(project.pages_metadatum).not_to be_deployed
-
- expect(deploy_status).to be_failed
- expect(deploy_status.description).to eq('build SHA is outdated for this ref')
- end
-
it 'does not fail if pages_metadata is absent' do
project.pages_metadatum.destroy!
project.reload
@@ -158,6 +148,14 @@ RSpec.describe Projects::UpdatePagesService do
expect(execute).not_to eq(:success)
end
+ it 'limits pages file count' do
+ create(:plan_limits, :default_plan, pages_file_entries: 2)
+
+ expect(execute).not_to eq(:success)
+
+ expect(GenericCommitStatus.last.description).to eq("pages site contains 3 file entries, while limit is set to 2")
+ end
+
it 'removes pages after destroy' do
expect(PagesWorker).to receive(:perform_in)
expect(project.pages_deployed?).to be_falsey
@@ -175,16 +173,6 @@ RSpec.describe Projects::UpdatePagesService do
expect(ProjectPagesMetadatum.find_by_project_id(project)).to be_nil
end
- it 'fails if sha on branch is not latest' do
- build.update!(ref: 'feature')
-
- expect(execute).not_to eq(:success)
- expect(project.pages_metadatum).not_to be_deployed
-
- expect(deploy_status).to be_failed
- expect(deploy_status.description).to eq('build SHA is outdated for this ref')
- end
-
context 'when using empty file' do
let(:file) { empty_file }
@@ -259,6 +247,75 @@ RSpec.describe Projects::UpdatePagesService do
expect(execute).to eq(:success)
end
end
+
+ context "when sha on branch was updated before deployment was uploaded" do
+ before do
+ expect(subject).to receive(:create_pages_deployment).and_wrap_original do |m, *args|
+ build.update!(ref: 'feature')
+ m.call(*args)
+ end
+ end
+
+ shared_examples 'fails with outdated reference message' do
+ it 'fails' do
+ expect(execute).not_to eq(:success)
+ expect(project.reload.pages_metadatum).not_to be_deployed
+
+ expect(deploy_status).to be_failed
+ expect(deploy_status.description).to eq('build SHA is outdated for this ref')
+ end
+ end
+
+ shared_examples 'successfully deploys' do
+ it 'succeeds' do
+ expect do
+ expect(execute).to eq(:success)
+ end.to change { project.pages_deployments.count }.by(1)
+
+ deployment = project.pages_deployments.last
+ expect(deployment.ci_build_id).to eq(build.id)
+ end
+ end
+
+ include_examples 'successfully deploys'
+
+ context 'when pages_smart_check_outdated_sha feature flag is disabled' do
+ before do
+ stub_feature_flags(pages_smart_check_outdated_sha: false)
+ end
+
+ include_examples 'fails with outdated reference message'
+ end
+
+ context 'when old deployment present' do
+ before do
+ old_build = create(:ci_build, pipeline: old_pipeline, ref: 'HEAD')
+ old_deployment = create(:pages_deployment, ci_build: old_build, project: project)
+ project.update_pages_deployment!(old_deployment)
+ end
+
+ include_examples 'successfully deploys'
+
+ context 'when pages_smart_check_outdated_sha feature flag is disabled' do
+ before do
+ stub_feature_flags(pages_smart_check_outdated_sha: false)
+ end
+
+ include_examples 'fails with outdated reference message'
+ end
+ end
+
+ context 'when newer deployment present' do
+ before do
+ new_pipeline = create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha)
+ new_build = create(:ci_build, pipeline: new_pipeline, ref: 'HEAD')
+ new_deployment = create(:pages_deployment, ci_build: new_build, project: project)
+ project.update_pages_deployment!(new_deployment)
+ end
+
+ include_examples 'fails with outdated reference message'
+ end
+ end
end
end
@@ -339,9 +396,15 @@ RSpec.describe Projects::UpdatePagesService do
create(:ci_job_artifact, :archive, file: file, job: build)
create(:ci_job_artifact, :metadata, file: metafile, job: build)
- allow(build).to receive(:artifacts_metadata_entry)
+ allow(build).to receive(:artifacts_metadata_entry).with('public/', recursive: true)
.and_return(metadata)
allow(metadata).to receive(:total_size).and_return(100)
+
+ # to pass entries count check
+ root_metadata = double('root metadata')
+ allow(build).to receive(:artifacts_metadata_entry).with('', recursive: true)
+ .and_return(root_metadata)
+ allow(root_metadata).to receive_message_chain(:entries, :count).and_return(10)
end
it 'raises an error' do
diff --git a/spec/services/projects/update_remote_mirror_service_spec.rb b/spec/services/projects/update_remote_mirror_service_spec.rb
index feb70ddaa46..f4a6d1b19e7 100644
--- a/spec/services/projects/update_remote_mirror_service_spec.rb
+++ b/spec/services/projects/update_remote_mirror_service_spec.rb
@@ -7,44 +7,21 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
let_it_be(:remote_project) { create(:forked_project_with_submodules) }
let_it_be(:remote_mirror) { create(:remote_mirror, project: project, enabled: true) }
- let(:remote_name) { remote_mirror.remote_name }
-
subject(:service) { described_class.new(project, project.creator) }
describe '#execute' do
let(:retries) { 0 }
- let(:inmemory) { true }
subject(:execute!) { service.execute(remote_mirror, retries) }
before do
- stub_feature_flags(update_remote_mirror_inmemory: inmemory)
project.repository.add_branch(project.owner, 'existing-branch', 'master')
allow(remote_mirror)
.to receive(:update_repository)
- .with(inmemory_remote: inmemory)
.and_return(double(divergent_refs: []))
end
- context 'with in-memory remote disabled' do
- let(:inmemory) { false }
-
- it 'ensures the remote exists' do
- expect(remote_mirror).to receive(:ensure_remote!)
-
- execute!
- end
- end
-
- context 'with in-memory remote enabled' do
- it 'does not ensure the remote exists' do
- expect(remote_mirror).not_to receive(:ensure_remote!)
-
- execute!
- end
- end
-
it 'does not fetch the remote repository' do
# See https://gitlab.com/gitlab-org/gitaly/-/issues/2670
expect(project.repository).not_to receive(:fetch_remote)
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index d7f5c39e457..a1b726071d6 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -2080,6 +2080,61 @@ RSpec.describe QuickActions::InterpretService do
end
end
end
+
+ context 'severity command' do
+ let_it_be_with_reload(:issuable) { create(:incident, project: project) }
+
+ subject(:set_severity) { service.execute(content, issuable) }
+
+ it_behaves_like 'failed command', 'No severity matches the provided parameter' do
+ let(:content) { '/severity something' }
+ end
+
+ shared_examples 'updates the severity' do |new_severity|
+ it do
+ expect { set_severity }.to change { issuable.severity }.from('unknown').to(new_severity)
+ end
+ end
+
+ context 'when quick action is used on creation' do
+ let(:content) { '/severity s3' }
+ let(:issuable) { build(:incident, project: project) }
+
+ it_behaves_like 'updates the severity', 'medium'
+
+ context 'issuable does not support severity' do
+ let(:issuable) { build(:issue, project: project) }
+
+ it_behaves_like 'failed command', ''
+ end
+ end
+
+ context 'severity given with S format' do
+ let(:content) { '/severity s3' }
+
+ it_behaves_like 'updates the severity', 'medium'
+ end
+
+ context 'severity given with number format' do
+ let(:content) { '/severity 3' }
+
+ it_behaves_like 'updates the severity', 'medium'
+ end
+
+ context 'severity given with text format' do
+ let(:content) { '/severity medium' }
+
+ it_behaves_like 'updates the severity', 'medium'
+ end
+
+ context 'an issuable that does not support severity' do
+ let_it_be_with_reload(:issuable) { create(:issue, project: project) }
+
+ it_behaves_like 'failed command', 'Could not apply severity command.' do
+ let(:content) { '/severity s3' }
+ end
+ end
+ end
end
describe '#explain' do
diff --git a/spec/services/releases/create_service_spec.rb b/spec/services/releases/create_service_spec.rb
index bf28fde3d90..7287825a0be 100644
--- a/spec/services/releases/create_service_spec.rb
+++ b/spec/services/releases/create_service_spec.rb
@@ -44,21 +44,6 @@ RSpec.describe Releases::CreateService do
it_behaves_like 'a successful release creation'
- context 'when tag is protected and user does not have access to it' do
- let!(:protected_tag) { create(:protected_tag, :no_one_can_create, name: '*', project: project) }
-
- it 'track the error event' do
- stub_feature_flags(evalute_protected_tag_for_release_permissions: false)
-
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- kind_of(described_class::ReleaseProtectedTagAccessError),
- project_id: project.id,
- user_id: user.id)
-
- service.execute
- end
- end
-
context 'when the tag does not exist' do
let(:tag_name) { 'non-exist-tag' }
diff --git a/spec/services/releases/destroy_service_spec.rb b/spec/services/releases/destroy_service_spec.rb
index 38cdcef3825..bc5bff0b31d 100644
--- a/spec/services/releases/destroy_service_spec.rb
+++ b/spec/services/releases/destroy_service_spec.rb
@@ -28,21 +28,6 @@ RSpec.describe Releases::DestroyService do
it 'returns the destroyed object' do
is_expected.to include(status: :success, release: release)
end
-
- context 'when tag is protected and user does not have access to it' do
- let!(:protected_tag) { create(:protected_tag, :no_one_can_create, name: '*', project: project) }
-
- it 'track the error event' do
- stub_feature_flags(evalute_protected_tag_for_release_permissions: false)
-
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- kind_of(described_class::ReleaseProtectedTagAccessError),
- project_id: project.id,
- user_id: user.id)
-
- service.execute
- end
- end
end
context 'when tag does not exist in the repository' do
diff --git a/spec/services/releases/update_service_spec.rb b/spec/services/releases/update_service_spec.rb
index 96b562a8071..932a7fab5ec 100644
--- a/spec/services/releases/update_service_spec.rb
+++ b/spec/services/releases/update_service_spec.rb
@@ -38,21 +38,6 @@ RSpec.describe Releases::UpdateService do
service.execute
end
- context 'when tag is protected and user does not have access to it' do
- let!(:protected_tag) { create(:protected_tag, :no_one_can_create, name: '*', project: project) }
-
- it 'track the error event' do
- stub_feature_flags(evalute_protected_tag_for_release_permissions: false)
-
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- kind_of(described_class::ReleaseProtectedTagAccessError),
- project_id: project.id,
- user_id: user.id)
-
- service.execute
- end
- end
-
context 'when the tag does not exists' do
let(:tag_name) { 'foobar' }
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index 012168ef719..b987e3204ad 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
let(:removed) { [labels[1]] }
it 'creates all label events in a single query' do
- expect(Gitlab::Database).to receive(:bulk_insert).once.and_call_original
+ expect(Gitlab::Database.main).to receive(:bulk_insert).once.and_call_original
expect { subject }.to change { resource.resource_label_events.count }.from(0).to(2)
end
end
diff --git a/spec/services/security/merge_reports_service_spec.rb b/spec/services/security/merge_reports_service_spec.rb
new file mode 100644
index 00000000000..120ce12aa58
--- /dev/null
+++ b/spec/services/security/merge_reports_service_spec.rb
@@ -0,0 +1,260 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# rubocop: disable RSpec/MultipleMemoizedHelpers
+RSpec.describe Security::MergeReportsService, '#execute' do
+ let(:scanner_1) { build(:ci_reports_security_scanner, external_id: 'scanner-1', name: 'Scanner 1') }
+ let(:scanner_2) { build(:ci_reports_security_scanner, external_id: 'scanner-2', name: 'Scanner 2') }
+ let(:scanner_3) { build(:ci_reports_security_scanner, external_id: 'scanner-3', name: 'Scanner 3') }
+
+ let(:identifier_1_primary) { build(:ci_reports_security_identifier, external_id: 'VULN-1', external_type: 'scanner-1') }
+ let(:identifier_1_cve) { build(:ci_reports_security_identifier, external_id: 'CVE-2019-123', external_type: 'cve') }
+ let(:identifier_2_primary) { build(:ci_reports_security_identifier, external_id: 'VULN-2', external_type: 'scanner-2') }
+ let(:identifier_2_cve) { build(:ci_reports_security_identifier, external_id: 'CVE-2019-456', external_type: 'cve') }
+ let(:identifier_cwe) { build(:ci_reports_security_identifier, external_id: '789', external_type: 'cwe') }
+ let(:identifier_wasc) { build(:ci_reports_security_identifier, external_id: '13', external_type: 'wasc') }
+
+ let(:finding_id_1) do
+ build(:ci_reports_security_finding,
+ identifiers: [identifier_1_primary, identifier_1_cve],
+ scanner: scanner_1,
+ severity: :low
+ )
+ end
+
+ let(:finding_id_1_extra) do
+ build(:ci_reports_security_finding,
+ identifiers: [identifier_1_primary, identifier_1_cve],
+ scanner: scanner_1,
+ severity: :low
+ )
+ end
+
+ let(:finding_id_2_loc_1) do
+ build(:ci_reports_security_finding,
+ identifiers: [identifier_2_primary, identifier_2_cve],
+ location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
+ scanner: scanner_2,
+ severity: :medium
+ )
+ end
+
+ let(:finding_id_2_loc_1_extra) do
+ build(:ci_reports_security_finding,
+ identifiers: [identifier_2_primary, identifier_2_cve],
+ location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
+ scanner: scanner_2,
+ severity: :medium
+ )
+ end
+
+ let(:finding_id_2_loc_2) do
+ build(:ci_reports_security_finding,
+ identifiers: [identifier_2_primary, identifier_2_cve],
+ location: build(:ci_reports_security_locations_sast, start_line: 42, end_line: 44),
+ scanner: scanner_2,
+ severity: :medium
+ )
+ end
+
+ let(:finding_cwe_1) do
+ build(:ci_reports_security_finding,
+ identifiers: [identifier_cwe],
+ scanner: scanner_3,
+ severity: :high
+ )
+ end
+
+ let(:finding_cwe_2) do
+ build(:ci_reports_security_finding,
+ identifiers: [identifier_cwe],
+ scanner: scanner_1,
+ severity: :critical
+ )
+ end
+
+ let(:finding_wasc_1) do
+ build(:ci_reports_security_finding,
+ identifiers: [identifier_wasc],
+ scanner: scanner_1,
+ severity: :medium
+ )
+ end
+
+ let(:finding_wasc_2) do
+ build(:ci_reports_security_finding,
+ identifiers: [identifier_wasc],
+ scanner: scanner_2,
+ severity: :critical
+ )
+ end
+
+ let(:report_1_findings) { [finding_id_1, finding_id_2_loc_1, finding_id_2_loc_1_extra, finding_cwe_2, finding_wasc_1] }
+
+ let(:scanned_resource) do
+ ::Gitlab::Ci::Reports::Security::ScannedResource.new(URI.parse('example.com'), 'GET')
+ end
+
+ let(:scanned_resource_1) do
+ ::Gitlab::Ci::Reports::Security::ScannedResource.new(URI.parse('example.com'), 'POST')
+ end
+
+ let(:scanned_resource_2) do
+ ::Gitlab::Ci::Reports::Security::ScannedResource.new(URI.parse('example.com/2'), 'GET')
+ end
+
+ let(:scanned_resource_3) do
+ ::Gitlab::Ci::Reports::Security::ScannedResource.new(URI.parse('example.com/3'), 'GET')
+ end
+
+ let(:report_1) do
+ build(
+ :ci_reports_security_report,
+ scanners: [scanner_1, scanner_2],
+ findings: report_1_findings,
+ identifiers: report_1_findings.flat_map(&:identifiers),
+ scanned_resources: [scanned_resource, scanned_resource_1, scanned_resource_2]
+ )
+ end
+
+ let(:report_2_findings) { [finding_id_2_loc_2, finding_wasc_2] }
+
+ let(:report_2) do
+ build(
+ :ci_reports_security_report,
+ scanners: [scanner_2],
+ findings: report_2_findings,
+ identifiers: finding_id_2_loc_2.identifiers,
+ scanned_resources: [scanned_resource, scanned_resource_1, scanned_resource_3]
+ )
+ end
+
+ let(:report_3_findings) { [finding_id_1_extra, finding_cwe_1] }
+
+ let(:report_3) do
+ build(
+ :ci_reports_security_report,
+ scanners: [scanner_1, scanner_3],
+ findings: report_3_findings,
+ identifiers: report_3_findings.flat_map(&:identifiers)
+ )
+ end
+
+ let(:merge_service) { described_class.new(report_1, report_2, report_3) }
+
+ subject(:merged_report) { merge_service.execute }
+
+ describe 'errors on target report' do
+ subject { merged_report.errors }
+
+ before do
+ report_1.add_error('foo', 'bar')
+ report_2.add_error('zoo', 'baz')
+ end
+
+ it { is_expected.to eq([{ type: 'foo', message: 'bar' }, { type: 'zoo', message: 'baz' }]) }
+ end
+
+ it 'copies scanners into target report and eliminates duplicates' do
+ expect(merged_report.scanners.values).to contain_exactly(scanner_1, scanner_2, scanner_3)
+ end
+
+ it 'copies identifiers into target report and eliminates duplicates' do
+ expect(merged_report.identifiers.values).to(
+ contain_exactly(
+ identifier_1_primary,
+ identifier_1_cve,
+ identifier_2_primary,
+ identifier_2_cve,
+ identifier_cwe,
+ identifier_wasc
+ )
+ )
+ end
+
+ it 'deduplicates (except cwe and wasc) and sorts the vulnerabilities by severity (desc) then by compare key' do
+ expect(merged_report.findings).to(
+ eq([
+ finding_cwe_2,
+ finding_wasc_2,
+ finding_cwe_1,
+ finding_id_2_loc_2,
+ finding_id_2_loc_1,
+ finding_wasc_1,
+ finding_id_1
+ ])
+ )
+ end
+
+ it 'deduplicates scanned resources' do
+ expect(merged_report.scanned_resources).to(
+ eq([
+ scanned_resource,
+ scanned_resource_1,
+ scanned_resource_2,
+ scanned_resource_3
+ ])
+ )
+ end
+
+ context 'ordering reports for sast analyzers' do
+ let(:bandit_scanner) { build(:ci_reports_security_scanner, external_id: 'bandit', name: 'Bandit') }
+ let(:semgrep_scanner) { build(:ci_reports_security_scanner, external_id: 'semgrep', name: 'Semgrep') }
+
+ let(:identifier_bandit) { build(:ci_reports_security_identifier, external_id: 'B403', external_type: 'bandit_test_id') }
+ let(:identifier_cve) { build(:ci_reports_security_identifier, external_id: 'CVE-2019-123', external_type: 'cve') }
+ let(:identifier_semgrep) { build(:ci_reports_security_identifier, external_id: 'rules.bandit.B105', external_type: 'semgrep_id') }
+
+ let(:finding_id_1) { build(:ci_reports_security_finding, identifiers: [identifier_bandit, identifier_cve], scanner: bandit_scanner, report_type: :sast) }
+ let(:finding_id_2) { build(:ci_reports_security_finding, identifiers: [identifier_cve], scanner: semgrep_scanner, report_type: :sast) }
+ let(:finding_id_3) { build(:ci_reports_security_finding, identifiers: [identifier_semgrep], scanner: semgrep_scanner, report_type: :sast) }
+
+ let(:bandit_report) do
+ build(:ci_reports_security_report,
+ type: :sast,
+ scanners: [bandit_scanner],
+ findings: [finding_id_1],
+ identifiers: finding_id_1.identifiers
+ )
+ end
+
+ let(:semgrep_report) do
+ build(
+ :ci_reports_security_report,
+ type: :sast,
+ scanners: [semgrep_scanner],
+ findings: [finding_id_2, finding_id_3],
+ identifiers: finding_id_2.identifiers + finding_id_3.identifiers
+ )
+ end
+
+ let(:custom_analyzer_report) do
+ build(
+ :ci_reports_security_report,
+ type: :sast,
+ scanners: [scanner_2],
+ findings: [finding_id_2_loc_1],
+ identifiers: finding_id_2_loc_1.identifiers
+ )
+ end
+
+ context 'when reports are gathered in an unprioritized order' do
+ subject(:sast_merged_report) { described_class.new(semgrep_report, bandit_report).execute }
+
+ specify { expect(sast_merged_report.scanners.values).to eql([bandit_scanner, semgrep_scanner]) }
+ specify { expect(sast_merged_report.findings.count).to eq(2) }
+ specify { expect(sast_merged_report.findings.first.identifiers).to eql([identifier_bandit, identifier_cve]) }
+ specify { expect(sast_merged_report.findings.last.identifiers).to contain_exactly(identifier_semgrep) }
+ end
+
+ context 'when a custom analyzer is completed before the known analyzers' do
+ subject(:sast_merged_report) { described_class.new(custom_analyzer_report, semgrep_report, bandit_report).execute }
+
+ specify { expect(sast_merged_report.scanners.values).to eql([bandit_scanner, semgrep_scanner, scanner_2]) }
+ specify { expect(sast_merged_report.findings.count).to eq(3) }
+ specify { expect(sast_merged_report.findings.last.identifiers).to match_array(finding_id_2_loc_1.identifiers) }
+ end
+ end
+end
+# rubocop: enable RSpec/MultipleMemoizedHelpers
diff --git a/spec/services/service_ping/permit_data_categories_service_spec.rb b/spec/services/service_ping/permit_data_categories_service_spec.rb
index 4fd5c6f9ccb..550c0ea5e13 100644
--- a/spec/services/service_ping/permit_data_categories_service_spec.rb
+++ b/spec/services/service_ping/permit_data_categories_service_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe ServicePing::PermitDataCategoriesService do
- using RSpec::Parameterized::TableSyntax
-
describe '#execute', :without_license do
subject(:permitted_categories) { described_class.new.execute }
@@ -15,7 +13,7 @@ RSpec.describe ServicePing::PermitDataCategoriesService do
end
it 'returns all categories' do
- expect(permitted_categories).to match_array(%w[Standard Subscription Operational Optional])
+ expect(permitted_categories).to match_array(%w[standard subscription operational optional])
end
end
@@ -41,27 +39,4 @@ RSpec.describe ServicePing::PermitDataCategoriesService do
end
end
end
-
- describe '#product_intelligence_enabled?' do
- where(:usage_ping_enabled, :requires_usage_stats_consent, :expected_product_intelligence_enabled) do
- # Usage ping enabled
- true | false | true
- true | true | false
-
- # Usage ping disabled
- false | false | false
- false | true | false
- end
-
- with_them do
- before do
- allow(User).to receive(:single_user).and_return(double(:user, requires_usage_stats_consent?: requires_usage_stats_consent))
- stub_config_setting(usage_ping_enabled: usage_ping_enabled)
- end
-
- it 'has the correct product_intelligence_enabled?' do
- expect(described_class.new.product_intelligence_enabled?).to eq(expected_product_intelligence_enabled)
- end
- end
- end
end
diff --git a/spec/services/service_ping/service_ping_settings_spec.rb b/spec/services/service_ping/service_ping_settings_spec.rb
new file mode 100644
index 00000000000..90a5c6b30eb
--- /dev/null
+++ b/spec/services/service_ping/service_ping_settings_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ServicePing::ServicePingSettings do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#product_intelligence_enabled?' do
+ where(:usage_ping_enabled, :requires_usage_stats_consent, :expected_product_intelligence_enabled) do
+ # Usage ping enabled
+ true | false | true
+ true | true | false
+
+ # Usage ping disabled
+ false | false | false
+ false | true | false
+ end
+
+ with_them do
+ before do
+ allow(User).to receive(:single_user).and_return(double(:user, requires_usage_stats_consent?: requires_usage_stats_consent))
+ stub_config_setting(usage_ping_enabled: usage_ping_enabled)
+ end
+
+ it 'has the correct product_intelligence_enabled?' do
+ expect(described_class.product_intelligence_enabled?).to eq(expected_product_intelligence_enabled)
+ end
+ end
+ end
+
+ describe '#enabled?' do
+ describe 'has the correct enabled' do
+ it 'when false' do
+ stub_config_setting(usage_ping_enabled: false)
+
+ expect(described_class.enabled?).to eq(false)
+ end
+
+ it 'when true' do
+ stub_config_setting(usage_ping_enabled: true)
+
+ expect(described_class.enabled?).to eq(true)
+ end
+ end
+ end
+end
diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index 8a3065e6bc6..05df4e49014 100644
--- a/spec/services/service_ping/submit_service_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -100,9 +100,7 @@ RSpec.describe ServicePing::SubmitService do
context 'when product_intelligence_enabled is false' do
before do
- allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |service|
- allow(service).to receive(:product_intelligence_enabled?).and_return(false)
- end
+ allow(ServicePing::ServicePingSettings).to receive(:product_intelligence_enabled?).and_return(false)
end
it_behaves_like 'does not run'
@@ -112,9 +110,7 @@ RSpec.describe ServicePing::SubmitService do
before do
stub_usage_data_connections
- allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |service|
- allow(service).to receive(:product_intelligence_enabled?).and_return(true)
- end
+ allow(ServicePing::ServicePingSettings).to receive(:product_intelligence_enabled?).and_return(true)
end
it 'generates service ping' do
diff --git a/spec/services/service_response_spec.rb b/spec/services/service_response_spec.rb
index 986b26e67d7..082ee4ddc67 100644
--- a/spec/services/service_response_spec.rb
+++ b/spec/services/service_response_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-ActiveSupport::Dependencies.autoload_paths << 'app/services'
+require_relative '../../app/services/service_response'
RSpec.describe ServiceResponse do
describe '.success' do
diff --git a/spec/services/spam/mark_as_spam_service_spec.rb b/spec/services/spam/akismet_mark_as_spam_service_spec.rb
index 308a66c3a48..12666e23e47 100644
--- a/spec/services/spam/mark_as_spam_service_spec.rb
+++ b/spec/services/spam/akismet_mark_as_spam_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Spam::MarkAsSpamService do
+RSpec.describe Spam::AkismetMarkAsSpamService do
let(:user_agent_detail) { build(:user_agent_detail) }
let(:spammable) { build(:issue, user_agent_detail: user_agent_detail) }
let(:fake_akismet_service) { double(:akismet_service, submit_spam: true) }
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index 3a92e5acb5a..8ddfa7ed3a0 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Spam::SpamActionService do
include_context 'includes Spam constants'
- let(:issue) { create(:issue, project: project, author: user) }
+ let(:issue) { create(:issue, project: project, author: author) }
let(:fake_ip) { '1.2.3.4' }
let(:fake_user_agent) { 'fake-user-agent' }
let(:fake_referer) { 'fake-http-referer' }
@@ -23,6 +23,7 @@ RSpec.describe Spam::SpamActionService do
let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
+ let_it_be(:author) { create(:user) }
before do
issue.spam = false
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index e9bd40b058b..5aff5149dcf 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -745,7 +745,7 @@ RSpec.describe SystemNoteService do
end
describe '.new_alert_issue' do
- let(:alert) { build(:alert_management_alert, :with_issue) }
+ let(:alert) { build(:alert_management_alert, :with_incident) }
it 'calls AlertManagementService' do
expect_next_instance_of(SystemNotes::AlertManagementService) do |service|
diff --git a/spec/services/system_notes/alert_management_service_spec.rb b/spec/services/system_notes/alert_management_service_spec.rb
index 1c36a4036cc..6e6bfeaa205 100644
--- a/spec/services/system_notes/alert_management_service_spec.rb
+++ b/spec/services/system_notes/alert_management_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe ::SystemNotes::AlertManagementService do
let_it_be(:author) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:noteable) { create(:alert_management_alert, :with_issue, :acknowledged, project: project) }
+ let_it_be(:noteable) { create(:alert_management_alert, :with_incident, :acknowledged, project: project) }
describe '#create_new_alert' do
subject { described_class.new(noteable: noteable, project: project).create_new_alert('Some Service') }
diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb
index cfafa9eff45..6c1df5c745f 100644
--- a/spec/services/users/activity_service_spec.rb
+++ b/spec/services/users/activity_service_spec.rb
@@ -85,19 +85,14 @@ RSpec.describe Users::ActivityService do
end
end
- context 'with DB Load Balancing', :request_store, :redis, :clean_gitlab_redis_shared_state do
- include_context 'clear DB Load Balancing configuration'
-
+ context 'with DB Load Balancing' do
let(:user) { create(:user, last_activity_on: last_activity_on) }
context 'when last activity is in the past' do
let(:user) { create(:user, last_activity_on: Date.today - 1.week) }
- context 'database load balancing is configured' do
+ context 'database load balancing is configured', :db_load_balancing do
before do
- # Do not pollute AR for other tests, but rather simulate effect of configure_proxy.
- allow(ActiveRecord::Base.singleton_class).to receive(:prepend)
- ::Gitlab::Database::LoadBalancing.configure_proxy
allow(ActiveRecord::Base).to receive(:connection).and_return(::Gitlab::Database::LoadBalancing.proxy)
end
diff --git a/spec/services/users/ban_service_spec.rb b/spec/services/users/ban_service_spec.rb
index 0e6ac615da5..6f49ee08782 100644
--- a/spec/services/users/ban_service_spec.rb
+++ b/spec/services/users/ban_service_spec.rb
@@ -3,47 +3,68 @@
require 'spec_helper'
RSpec.describe Users::BanService do
- let(:current_user) { create(:admin) }
+ let(:user) { create(:user) }
- subject(:service) { described_class.new(current_user) }
+ let_it_be(:current_user) { create(:admin) }
- describe '#execute' do
- subject(:operation) { service.execute(user) }
+ shared_examples 'does not modify the BannedUser record or user state' do
+ it 'does not modify the BannedUser record or user state' do
+ expect { ban_user }.not_to change { Users::BannedUser.count }
+ expect { ban_user }.not_to change { user.state }
+ end
+ end
- context 'when successful' do
- let(:user) { create(:user) }
+ context 'ban', :aggregate_failures do
+ subject(:ban_user) { described_class.new(current_user).execute(user) }
- it { is_expected.to eq(status: :success) }
+ context 'when successful', :enable_admin_mode do
+ it 'returns success status' do
+ response = ban_user
- it "bans the user" do
- expect { operation }.to change { user.state }.to('banned')
+ expect(response[:status]).to eq(:success)
end
- it "blocks the user" do
- expect { operation }.to change { user.blocked? }.from(false).to(true)
+ it 'bans the user' do
+ expect { ban_user }.to change { user.state }.from('active').to('banned')
end
- it 'logs ban in application logs' do
- allow(Gitlab::AppLogger).to receive(:info)
+ it 'creates a BannedUser' do
+ expect { ban_user }.to change { Users::BannedUser.count }.by(1)
+ expect(Users::BannedUser.last.user_id).to eq(user.id)
+ end
- operation
+ it 'logs ban in application logs' do
+ expect(Gitlab::AppLogger).to receive(:info).with(message: "User ban", user: "#{user.username}", email: "#{user.email}", ban_by: "#{current_user.username}", ip_address: "#{current_user.current_sign_in_ip}")
- expect(Gitlab::AppLogger).to have_received(:info).with(message: "User banned", user: "#{user.username}", email: "#{user.email}", banned_by: "#{current_user.username}", ip_address: "#{current_user.current_sign_in_ip}")
+ ban_user
end
end
context 'when failed' do
- let(:user) { create(:user, :blocked) }
+ context 'when user is blocked', :enable_admin_mode do
+ before do
+ user.block!
+ end
- it 'returns error result' do
- aggregate_failures 'error result' do
- expect(operation[:status]).to eq(:error)
- expect(operation[:message]).to match(/State cannot transition/)
+ it 'returns state error message' do
+ response = ban_user
+
+ expect(response[:status]).to eq(:error)
+ expect(response[:message]).to match(/State cannot transition/)
end
+
+ it_behaves_like 'does not modify the BannedUser record or user state'
end
- it "does not change the user's state" do
- expect { operation }.not_to change { user.state }
+ context 'when user is not an admin' do
+ it 'returns permissions error message' do
+ response = ban_user
+
+ expect(response[:status]).to eq(:error)
+ expect(response[:message]).to match(/You are not allowed to ban a user/)
+ end
+
+ it_behaves_like 'does not modify the BannedUser record or user state'
end
end
end
diff --git a/spec/services/users/banned_user_base_service_spec.rb b/spec/services/users/banned_user_base_service_spec.rb
new file mode 100644
index 00000000000..29a549f0f49
--- /dev/null
+++ b/spec/services/users/banned_user_base_service_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::BannedUserBaseService do
+ let(:admin) { create(:admin) }
+ let(:base_service) { described_class.new(admin) }
+
+ describe '#initialize' do
+ it 'sets the current_user instance value' do
+ expect(base_service.instance_values["current_user"]).to eq(admin)
+ end
+ end
+end
diff --git a/spec/services/users/set_status_service_spec.rb b/spec/services/users/set_status_service_spec.rb
index 2c776a0eeb4..76e86506d94 100644
--- a/spec/services/users/set_status_service_spec.rb
+++ b/spec/services/users/set_status_service_spec.rb
@@ -8,6 +8,18 @@ RSpec.describe Users::SetStatusService do
subject(:service) { described_class.new(current_user, params) }
describe '#execute' do
+ shared_examples_for 'bumps user' do
+ it 'bumps User#updated_at' do
+ expect { service.execute }.to change { current_user.updated_at }
+ end
+ end
+
+ shared_examples_for 'does not bump user' do
+ it 'does not bump User#updated_at' do
+ expect { service.execute }.not_to change { current_user.updated_at }
+ end
+ end
+
context 'when params are set' do
let(:params) { { emoji: 'taurus', message: 'a random status', availability: 'busy' } }
@@ -31,6 +43,8 @@ RSpec.describe Users::SetStatusService do
expect(service.execute).to be(true)
end
+ it_behaves_like 'bumps user'
+
context 'when setting availability to not_set' do
before do
params[:availability] = 'not_set'
@@ -72,6 +86,8 @@ RSpec.describe Users::SetStatusService do
it 'does not update the status if the current user is not allowed' do
expect { service.execute }.not_to change { target_user.status }
end
+
+ it_behaves_like 'does not bump user'
end
end
@@ -79,20 +95,28 @@ RSpec.describe Users::SetStatusService do
let(:params) { {} }
shared_examples 'removes user status record' do
- it 'deletes the status' do
- status = create(:user_status, user: current_user)
-
+ it 'deletes the user status record' do
expect { service.execute }
- .to change { current_user.reload.status }.from(status).to(nil)
+ .to change { current_user.reload.status }.from(user_status).to(nil)
end
- end
- it_behaves_like 'removes user status record'
+ it_behaves_like 'bumps user'
+ end
- context 'when not_set is given for availability' do
- let(:params) { { availability: 'not_set' } }
+ context 'when user has existing user status record' do
+ let!(:user_status) { create(:user_status, user: current_user) }
it_behaves_like 'removes user status record'
+
+ context 'when not_set is given for availability' do
+ let(:params) { { availability: 'not_set' } }
+
+ it_behaves_like 'removes user status record'
+ end
+ end
+
+ context 'when user has no existing user status record' do
+ it_behaves_like 'does not bump user'
end
end
end
diff --git a/spec/services/users/unban_service_spec.rb b/spec/services/users/unban_service_spec.rb
new file mode 100644
index 00000000000..b2b3140ccb3
--- /dev/null
+++ b/spec/services/users/unban_service_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::UnbanService do
+ let(:user) { create(:user) }
+
+ let_it_be(:current_user) { create(:admin) }
+
+ shared_examples 'does not modify the BannedUser record or user state' do
+ it 'does not modify the BannedUser record or user state' do
+ expect { unban_user }.not_to change { Users::BannedUser.count }
+ expect { unban_user }.not_to change { user.state }
+ end
+ end
+
+ context 'unban', :aggregate_failures do
+ subject(:unban_user) { described_class.new(current_user).execute(user) }
+
+ context 'when successful', :enable_admin_mode do
+ before do
+ user.ban!
+ end
+
+ it 'returns success status' do
+ response = unban_user
+
+ expect(response[:status]).to eq(:success)
+ end
+
+ it 'unbans the user' do
+ expect { unban_user }.to change { user.state }.from('banned').to('active')
+ end
+
+ it 'removes the BannedUser' do
+ expect { unban_user }.to change { Users::BannedUser.count }.by(-1)
+ expect(user.reload.banned_user).to be_nil
+ end
+
+ it 'logs unban in application logs' do
+ expect(Gitlab::AppLogger).to receive(:info).with(message: "User unban", user: "#{user.username}", email: "#{user.email}", unban_by: "#{current_user.username}", ip_address: "#{current_user.current_sign_in_ip}")
+
+ unban_user
+ end
+ end
+
+ context 'when failed' do
+ context 'when user is already active', :enable_admin_mode do
+ it 'returns state error message' do
+ response = unban_user
+
+ expect(response[:status]).to eq(:error)
+ expect(response[:message]).to match(/State cannot transition/)
+ end
+
+ it_behaves_like 'does not modify the BannedUser record or user state'
+ end
+
+ context 'when user is not an admin' do
+ before do
+ user.ban!
+ end
+
+ it 'returns permissions error message' do
+ response = unban_user
+
+ expect(response[:status]).to eq(:error)
+ expect(response[:message]).to match(/You are not allowed to unban a user/)
+ end
+
+ it_behaves_like 'does not modify the BannedUser record or user state'
+ end
+ end
+ end
+end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index d339ac67810..b95b7fad5a0 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -230,7 +230,7 @@ RSpec.configure do |config|
# We can't use an `around` hook here because the wrapping transaction
# is not yet opened at the time that is triggered
config.prepend_before do
- Gitlab::Database.set_open_transactions_baseline
+ Gitlab::Database.main.set_open_transactions_baseline
end
config.append_before do
@@ -238,7 +238,7 @@ RSpec.configure do |config|
end
config.append_after do
- Gitlab::Database.reset_open_transactions_baseline
+ Gitlab::Database.main.reset_open_transactions_baseline
end
config.before do |example|
diff --git a/spec/support/before_all_adapter.rb b/spec/support/before_all_adapter.rb
new file mode 100644
index 00000000000..f48e0f46e80
--- /dev/null
+++ b/spec/support/before_all_adapter.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class BeforeAllAdapter # rubocop:disable Gitlab/NamespacedClass
+ def self.all_connection_pools
+ ::ActiveRecord::Base.connection_handler.all_connection_pools
+ end
+
+ def self.begin_transaction
+ self.all_connection_pools.each do |connection_pool|
+ connection_pool.connection.begin_transaction(joinable: false)
+ end
+ end
+
+ def self.rollback_transaction
+ self.all_connection_pools.each do |connection_pool|
+ if connection_pool.connection.open_transactions.zero?
+ warn "!!! before_all transaction has been already rollbacked and " \
+ "could work incorrectly"
+ next
+ end
+
+ connection_pool.connection.rollback_transaction
+ end
+ end
+end
+
+TestProf::BeforeAll.adapter = ::BeforeAllAdapter
diff --git a/spec/support/database/ci_tables.rb b/spec/support/database/ci_tables.rb
new file mode 100644
index 00000000000..99fc7ac2501
--- /dev/null
+++ b/spec/support/database/ci_tables.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+# This module stores the CI-related database tables which are
+# going to be moved to a separate database.
+module Database
+ module CiTables
+ def self.include?(name)
+ ci_tables.include?(name)
+ end
+
+ def self.ci_tables
+ @@ci_tables ||= Set.new.tap do |tables| # rubocop:disable Style/ClassVars
+ tables.merge(Ci::ApplicationRecord.descendants.map(&:table_name).compact)
+
+ # It was decided that taggings/tags are best placed with CI
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/333413
+ tables.add('taggings')
+ tables.add('tags')
+ end
+ end
+ end
+end
diff --git a/spec/support/database/prevent_cross_database_modification.rb b/spec/support/database/prevent_cross_database_modification.rb
new file mode 100644
index 00000000000..460ee99391b
--- /dev/null
+++ b/spec/support/database/prevent_cross_database_modification.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+module Database
+ module PreventCrossDatabaseModification
+ CrossDatabaseModificationAcrossUnsupportedTablesError = Class.new(StandardError)
+
+ module GitlabDatabaseMixin
+ def allow_cross_database_modification_within_transaction(url:)
+ cross_database_context = Database::PreventCrossDatabaseModification.cross_database_context
+ return yield unless cross_database_context && cross_database_context[:enabled]
+
+ transaction_tracker_enabled_was = cross_database_context[:enabled]
+ cross_database_context[:enabled] = false
+
+ yield
+ ensure
+ cross_database_context[:enabled] = transaction_tracker_enabled_was if cross_database_context
+ end
+ end
+
+ module SpecHelpers
+ def with_cross_database_modification_prevented
+ subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |name, start, finish, id, payload|
+ PreventCrossDatabaseModification.prevent_cross_database_modification!(payload[:connection], payload[:sql])
+ end
+
+ PreventCrossDatabaseModification.reset_cross_database_context!
+ PreventCrossDatabaseModification.cross_database_context.merge!(enabled: true, subscriber: subscriber)
+
+ yield if block_given?
+ ensure
+ cleanup_with_cross_database_modification_prevented if block_given?
+ end
+
+ def cleanup_with_cross_database_modification_prevented
+ ActiveSupport::Notifications.unsubscribe(PreventCrossDatabaseModification.cross_database_context[:subscriber])
+ PreventCrossDatabaseModification.cross_database_context[:enabled] = false
+ end
+ end
+
+ def self.cross_database_context
+ Thread.current[:transaction_tracker]
+ end
+
+ def self.reset_cross_database_context!
+ Thread.current[:transaction_tracker] = initial_data
+ end
+
+ def self.initial_data
+ {
+ enabled: false,
+ transaction_depth_by_db: Hash.new { |h, k| h[k] = 0 },
+ modified_tables_by_db: Hash.new { |h, k| h[k] = Set.new }
+ }
+ end
+
+ def self.prevent_cross_database_modification!(connection, sql)
+ return unless cross_database_context[:enabled]
+
+ database = connection.pool.db_config.name
+
+ if sql.start_with?('SAVEPOINT')
+ cross_database_context[:transaction_depth_by_db][database] += 1
+
+ return
+ elsif sql.start_with?('RELEASE SAVEPOINT', 'ROLLBACK TO SAVEPOINT')
+ cross_database_context[:transaction_depth_by_db][database] -= 1
+ if cross_database_context[:transaction_depth_by_db][database] <= 0
+ cross_database_context[:modified_tables_by_db][database].clear
+ end
+
+ return
+ end
+
+ return if cross_database_context[:transaction_depth_by_db].values.all?(&:zero?)
+
+ tables = PgQuery.parse(sql).dml_tables
+
+ return if tables.empty?
+
+ cross_database_context[:modified_tables_by_db][database].merge(tables)
+
+ all_tables = cross_database_context[:modified_tables_by_db].values.map(&:to_a).flatten
+
+ unless PreventCrossJoins.only_ci_or_only_main?(all_tables)
+ raise Database::PreventCrossDatabaseModification::CrossDatabaseModificationAcrossUnsupportedTablesError,
+ "Cross-database data modification queries (CI and Main) were detected within " \
+ "a transaction '#{all_tables.join(", ")}' discovered"
+ end
+ end
+ end
+end
+
+Gitlab::Database.singleton_class.prepend(
+ Database::PreventCrossDatabaseModification::GitlabDatabaseMixin)
+
+RSpec.configure do |config|
+ config.include(::Database::PreventCrossDatabaseModification::SpecHelpers)
+
+ # Using before and after blocks because the around block causes problems with the let_it_be
+ # record creations. It makes an extra savepoint which breaks the transaction count logic.
+ config.before(:each, :prevent_cross_database_modification) do
+ with_cross_database_modification_prevented
+ end
+
+ config.after(:each, :prevent_cross_database_modification) do
+ cleanup_with_cross_database_modification_prevented
+ end
+end
diff --git a/spec/support/database/prevent_cross_joins.rb b/spec/support/database/prevent_cross_joins.rb
new file mode 100644
index 00000000000..789721ccd38
--- /dev/null
+++ b/spec/support/database/prevent_cross_joins.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+# This module tries to discover and prevent cross-joins across tables
+# This will forbid usage of tables between CI and main database
+# on the same query unless explicitly allowed. This will change execution
+# from a given point to allow cross-joins. The state will be cleared
+# on the next test run.
+#
+# This method should be used to mark METHOD introducing cross-join
+# not a test using the cross-join.
+#
+# class User
+# def ci_owned_runners
+# ::Gitlab::Database.allow_cross_joins_across_databases(url: link-to-issue-url)
+#
+# ...
+# end
+# end
+
+module Database
+ module PreventCrossJoins
+ CrossJoinAcrossUnsupportedTablesError = Class.new(StandardError)
+
+ def self.validate_cross_joins!(sql)
+ return if Thread.current[:allow_cross_joins_across_databases]
+
+ # PgQuery might fail in some cases due to limited nesting:
+ # https://github.com/pganalyze/pg_query/issues/209
+ tables = PgQuery.parse(sql).tables
+
+ unless only_ci_or_only_main?(tables)
+ raise CrossJoinAcrossUnsupportedTablesError,
+ "Unsupported cross-join across '#{tables.join(", ")}' discovered " \
+ "when executing query '#{sql}'"
+ end
+ end
+
+ # Returns true if a set includes only CI tables, or includes only non-CI tables
+ def self.only_ci_or_only_main?(tables)
+ tables.all? { |table| CiTables.include?(table) } ||
+ tables.none? { |table| CiTables.include?(table) }
+ end
+
+ module SpecHelpers
+ def with_cross_joins_prevented
+ subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |event|
+ ::Database::PreventCrossJoins.validate_cross_joins!(event.payload[:sql])
+ end
+
+ Thread.current[:allow_cross_joins_across_databases] = false
+
+ yield
+ ensure
+ ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber
+ end
+ end
+
+ module GitlabDatabaseMixin
+ def allow_cross_joins_across_databases(url:)
+ Thread.current[:allow_cross_joins_across_databases] = true
+ super
+ end
+ end
+ end
+end
+
+Gitlab::Database.singleton_class.prepend(
+ Database::PreventCrossJoins::GitlabDatabaseMixin)
+
+RSpec.configure do |config|
+ config.include(::Database::PreventCrossJoins::SpecHelpers)
+
+ # TODO: remove `:prevent_cross_joins` to enable the check by default
+ config.around(:each, :prevent_cross_joins) do |example|
+ with_cross_joins_prevented { example.run }
+ end
+end
diff --git a/spec/support/database_cleaner.rb b/spec/support/database_cleaner.rb
index f6339d7343c..6a0e398daa1 100644
--- a/spec/support/database_cleaner.rb
+++ b/spec/support/database_cleaner.rb
@@ -5,10 +5,12 @@ require_relative 'db_cleaner'
RSpec.configure do |config|
include DbCleaner
- # Ensure all sequences are reset at the start of the suite run
+ # Ensure the database is empty at the start of the suite run with :deletion strategy
+ # neither the sequence is reset nor the tables are vacuumed, but this provides
+ # better I/O performance on machines with slower storage
config.before(:suite) do
setup_database_cleaner
- DatabaseCleaner.clean_with(:truncation)
+ DatabaseCleaner.clean_with(:deletion)
end
config.append_after(:context, :migration) do
diff --git a/spec/support/database_load_balancing.rb b/spec/support/database_load_balancing.rb
new file mode 100644
index 00000000000..03fa7886295
--- /dev/null
+++ b/spec/support/database_load_balancing.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ config.before(:each, :db_load_balancing) do
+ allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
+
+ proxy = ::Gitlab::Database::LoadBalancing::ConnectionProxy.new([Gitlab::Database.main.config['host']])
+
+ allow(ActiveRecord::Base).to receive(:load_balancing_proxy).and_return(proxy)
+
+ ::Gitlab::Database::LoadBalancing::Session.clear_session
+ redis_shared_state_cleanup!
+ end
+
+ config.after(:each, :db_load_balancing) do
+ ::Gitlab::Database::LoadBalancing::Session.clear_session
+ redis_shared_state_cleanup!
+ end
+end
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index ff913ebf22b..155dc3c17d9 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -1,6 +1,10 @@
# frozen_string_literal: true
module DbCleaner
+ def all_connection_classes
+ ::ActiveRecord::Base.connection_handler.connection_pool_names.map(&:constantize)
+ end
+
def delete_from_all_tables!(except: [])
except << 'ar_internal_metadata'
@@ -12,7 +16,9 @@ module DbCleaner
end
def setup_database_cleaner
- DatabaseCleaner[:active_record, { connection: ActiveRecord::Base }]
+ all_connection_classes.each do |connection_class|
+ DatabaseCleaner[:active_record, { connection: connection_class }]
+ end
end
end
diff --git a/spec/support/enable_multiple_database_metrics_by_default.rb b/spec/support/enable_multiple_database_metrics_by_default.rb
new file mode 100644
index 00000000000..6eeb4acd3d6
--- /dev/null
+++ b/spec/support/enable_multiple_database_metrics_by_default.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ config.before do
+ # Enable this by default in all tests so it behaves like a FF
+ stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', '1')
+ end
+end
diff --git a/spec/support/helpers/board_helpers.rb b/spec/support/helpers/board_helpers.rb
index c4e69d06f52..d7277ba9a20 100644
--- a/spec/support/helpers/board_helpers.rb
+++ b/spec/support/helpers/board_helpers.rb
@@ -23,4 +23,21 @@ module BoardHelpers
wait_for_requests
end
end
+
+ def drag(selector: '.board-list', list_from_index: 0, from_index: 0, to_index: 0, list_to_index: 0, perform_drop: true)
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ # ensure there is enough horizontal space for four board lists
+ resize_window(2000, 800)
+
+ drag_to(selector: selector,
+ scrollable: '#board-app',
+ list_from_index: list_from_index,
+ from_index: from_index,
+ to_index: to_index,
+ list_to_index: list_to_index,
+ perform_drop: perform_drop)
+ end
+
+ wait_for_requests
+ end
end
diff --git a/spec/support/helpers/dependency_proxy_helpers.rb b/spec/support/helpers/dependency_proxy_helpers.rb
index 0d8f56906e3..9413cb93199 100644
--- a/spec/support/helpers/dependency_proxy_helpers.rb
+++ b/spec/support/helpers/dependency_proxy_helpers.rb
@@ -34,7 +34,8 @@ module DependencyProxyHelpers
def build_jwt(user = nil, expire_time: nil)
JSONWebToken::HMACToken.new(::Auth::DependencyProxyAuthenticationService.secret).tap do |jwt|
- jwt['user_id'] = user.id if user
+ jwt['user_id'] = user.id if user.is_a?(User)
+ jwt['deploy_token'] = user.token if user.is_a?(DeployToken)
jwt.expire_time = expire_time || jwt.issued_at + 1.minute
end
end
diff --git a/spec/support/helpers/features/invite_members_modal_helper.rb b/spec/support/helpers/features/invite_members_modal_helper.rb
index 7b8cd6963c0..69ba20c1ca4 100644
--- a/spec/support/helpers/features/invite_members_modal_helper.rb
+++ b/spec/support/helpers/features/invite_members_modal_helper.rb
@@ -5,7 +5,7 @@ module Spec
module Helpers
module Features
module InviteMembersModalHelper
- def invite_member(name, role: 'Guest', expires_at: nil)
+ def invite_member(name, role: 'Guest', expires_at: nil, area_of_focus: false)
click_on 'Invite members'
page.within '#invite-members-modal' do
@@ -14,6 +14,7 @@ module Spec
wait_for_requests
click_button name
choose_options(role, expires_at)
+ choose_area_of_focus if area_of_focus
click_button 'Invite'
@@ -41,7 +42,14 @@ module Spec
click_button role
end
- fill_in 'YYYY-MM-DD', with: expires_at.try(:strftime, '%Y-%m-%d')
+ fill_in 'YYYY-MM-DD', with: expires_at.strftime('%Y-%m-%d') if expires_at
+ end
+
+ def choose_area_of_focus
+ page.within '[data-testid="area-of-focus-checks"]' do
+ check 'Contribute to the codebase'
+ check 'Collaborate on open issues and merge requests'
+ end
end
end
end
diff --git a/spec/support/helpers/features/top_nav_spec_helpers.rb b/spec/support/helpers/features/top_nav_spec_helpers.rb
index 87ed897ec74..de495eceabc 100644
--- a/spec/support/helpers/features/top_nav_spec_helpers.rb
+++ b/spec/support/helpers/features/top_nav_spec_helpers.rb
@@ -8,38 +8,24 @@ module Spec
module Features
module TopNavSpecHelpers
def open_top_nav
- return unless Feature.enabled?(:combined_menu, default_enabled: :yaml)
-
find('.js-top-nav-dropdown-toggle').click
end
def within_top_nav
- if Feature.enabled?(:combined_menu, default_enabled: :yaml)
- within('.js-top-nav-dropdown-menu') do
- yield
- end
- else
- within('.navbar-sub-nav') do
- yield
- end
+ within('.js-top-nav-dropdown-menu') do
+ yield
end
end
def open_top_nav_projects
- if Feature.enabled?(:combined_menu, default_enabled: :yaml)
- open_top_nav
+ open_top_nav
- within_top_nav do
- click_button('Projects')
- end
- else
- find('#nav-projects-dropdown').click
+ within_top_nav do
+ click_button('Projects')
end
end
def open_top_nav_groups
- return unless Feature.enabled?(:combined_menu, default_enabled: :yaml)
-
open_top_nav
within_top_nav do
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 38cf828ca5e..6f17d3cb496 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -654,7 +654,7 @@ module GraphqlHelpers
Class.new(Types::BaseObject) do
graphql_name 'TestType'
- field :name, GraphQL::STRING_TYPE, null: true
+ field :name, GraphQL::Types::String, null: true
yield(self) if block_given?
end
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index cc88a3fc71e..d9157fa7485 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -88,9 +88,10 @@ module LoginHelpers
# Private: Login as the specified user
#
- # user - User instance to login with
+ # user - User instance to login with
# remember - Whether or not to check "Remember me" (default: false)
- def gitlab_sign_in_with(user, remember: false)
+ # two_factor_auth - If two-factor authentication is enabled (default: false)
+ def gitlab_sign_in_with(user, remember: false, two_factor_auth: false)
visit new_user_session_path
fill_in "user_login", with: user.email
@@ -98,6 +99,11 @@ module LoginHelpers
check 'user_remember_me' if remember
click_button "Sign in"
+
+ if two_factor_auth
+ fill_in "user_otp_attempt", with: user.reload.current_otp
+ click_button "Verify code"
+ end
end
def login_via(provider, user, uid, remember_me: false, additional_info: {})
diff --git a/spec/support/helpers/migrations_helpers.rb b/spec/support/helpers/migrations_helpers.rb
index fa50b234bd5..ef212938af5 100644
--- a/spec/support/helpers/migrations_helpers.rb
+++ b/spec/support/helpers/migrations_helpers.rb
@@ -16,6 +16,23 @@ module MigrationsHelpers
end
end
+ def partitioned_table(name, by: :created_at, strategy: :monthly)
+ klass = Class.new(active_record_base) do
+ include PartitionedTable
+
+ self.table_name = name
+ self.primary_key = :id
+
+ partitioned_by by, strategy: strategy
+
+ def self.name
+ table_name.singularize.camelcase
+ end
+ end
+
+ klass.tap { Gitlab::Database::Partitioning::PartitionManager.new.sync_partitions }
+ end
+
def migrations_paths
ActiveRecord::Migrator.migrations_paths
end
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 8814d260fb3..aa5fcf222f2 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -158,7 +158,7 @@ module TestEnv
component_timed_setup('Gitaly',
install_dir: gitaly_dir,
version: Gitlab::GitalyClient.expected_server_version,
- task: "gitlab:gitaly:install",
+ task: "gitlab:gitaly:test_install",
task_args: [gitaly_dir, repos_path, gitaly_url].compact) do
Gitlab::SetupHelper::Gitaly.create_configuration(
gitaly_dir,
@@ -263,8 +263,13 @@ module TestEnv
# Feature specs are run through Workhorse
def setup_workhorse
+ # Always rebuild the config file
+ if skip_compile_workhorse?
+ Gitlab::SetupHelper::Workhorse.create_configuration(workhorse_dir, nil, force: true)
+ return
+ end
+
start = Time.now
- return if skip_compile_workhorse?
FileUtils.rm_rf(workhorse_dir)
Gitlab::SetupHelper::Workhorse.compile_into(workhorse_dir)
@@ -305,12 +310,6 @@ module TestEnv
config_path = Gitlab::SetupHelper::Workhorse.get_config_path(workhorse_dir, {})
- # This should be set up in setup_workhorse, but since
- # component_needs_update? only checks that versions are consistent,
- # we need to ensure the config file exists. This line can be removed
- # later after a new Workhorse version is updated.
- Gitlab::SetupHelper::Workhorse.create_configuration(workhorse_dir, nil) unless File.exist?(config_path)
-
workhorse_pid = spawn(
{ 'PATH' => "#{ENV['PATH']}:#{workhorse_dir}" },
File.join(workhorse_dir, 'gitlab-workhorse'),
diff --git a/spec/support/helpers/tracking_helpers.rb b/spec/support/helpers/tracking_helpers.rb
new file mode 100644
index 00000000000..c0374578531
--- /dev/null
+++ b/spec/support/helpers/tracking_helpers.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+module TrackingHelpers
+ def stub_do_not_track(value)
+ request.headers['DNT'] = value
+ end
+end
diff --git a/spec/support/helpers/x509_helpers.rb b/spec/support/helpers/x509_helpers.rb
index ce0fa268ace..1dc8b1d4845 100644
--- a/spec/support/helpers/x509_helpers.rb
+++ b/spec/support/helpers/x509_helpers.rb
@@ -290,6 +290,17 @@ module X509Helpers
SIGNEDDATA
end
+ def unsigned_tag_base_data
+ <<~SIGNEDDATA
+ object 6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9
+ type commit
+ tag v1.0.0
+ tagger Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com> 1393491299 +0200
+
+ Release
+ SIGNEDDATA
+ end
+
def certificate_crl
'http://ch.siemens.com/pki?ZZZZZZA2.crl'
end
diff --git a/spec/support/import_export/common_util.rb b/spec/support/import_export/common_util.rb
index 5fb6af99b79..1aa20dab6f8 100644
--- a/spec/support/import_export/common_util.rb
+++ b/spec/support/import_export/common_util.rb
@@ -83,7 +83,7 @@ module ImportExport
path = File.join(dir_path, "#{exportable_path}.json")
return unless File.exist?(path)
- ActiveSupport::JSON.decode(IO.read(path))
+ Gitlab::Json.parse(IO.read(path))
end
def consume_relations(dir_path, exportable_path, key)
@@ -93,7 +93,7 @@ module ImportExport
relations = []
File.foreach(path) do |line|
- json = ActiveSupport::JSON.decode(line)
+ json = Gitlab::Json.parse(line)
relations << json
end
@@ -101,7 +101,7 @@ module ImportExport
end
def project_json(filename)
- ActiveSupport::JSON.decode(IO.read(filename))
+ Gitlab::Json.parse(IO.read(filename))
end
end
end
diff --git a/spec/support/matchers/background_migrations_matchers.rb b/spec/support/matchers/background_migrations_matchers.rb
index 08bbbcc7438..d3833a1e8e8 100644
--- a/spec/support/matchers/background_migrations_matchers.rb
+++ b/spec/support/matchers/background_migrations_matchers.rb
@@ -64,3 +64,33 @@ RSpec::Matchers.define :be_scheduled_migration_with_multiple_args do |*expected|
arg.sort == expected.sort
end
end
+
+RSpec::Matchers.define :have_scheduled_batched_migration do |table_name: nil, column_name: nil, job_arguments: [], **attributes|
+ define_method :matches? do |migration|
+ # Default arguments passed by BatchedMigrationWrapper (values don't matter here)
+ expect(migration).to be_background_migration_with_arguments([
+ _start_id = 1,
+ _stop_id = 2,
+ table_name,
+ column_name,
+ _sub_batch_size = 10,
+ _pause_ms = 100,
+ *job_arguments
+ ])
+
+ batched_migrations =
+ Gitlab::Database::BackgroundMigration::BatchedMigration
+ .for_configuration(migration, table_name, column_name, job_arguments)
+
+ expect(batched_migrations.count).to be(1)
+ expect(batched_migrations).to all(have_attributes(attributes)) if attributes.present?
+ end
+
+ define_method :does_not_match? do |migration|
+ batched_migrations =
+ Gitlab::Database::BackgroundMigration::BatchedMigration
+ .where(job_class_name: migration)
+
+ expect(batched_migrations.count).to be(0)
+ end
+end
diff --git a/spec/support/matchers/schema_matcher.rb b/spec/support/matchers/schema_matcher.rb
index 94e4359b1dd..5e08e96f4e1 100644
--- a/spec/support/matchers/schema_matcher.rb
+++ b/spec/support/matchers/schema_matcher.rb
@@ -45,6 +45,17 @@ RSpec::Matchers.define :match_response_schema do |schema, dir: nil, **options|
end
end
+RSpec::Matchers.define :match_metric_definition_schema do |path, dir: nil, **options|
+ match do |data|
+ schema_path = Pathname.new(Rails.root.join(dir.to_s, path).to_s)
+ validator = SchemaPath.validator(schema_path)
+
+ data = data.stringify_keys if data.is_a? Hash
+
+ validator.valid?(data)
+ end
+end
+
RSpec::Matchers.define :match_snowplow_schema do |schema, dir: nil, **options|
match do |data|
schema_path = Pathname.new(Rails.root.join(dir.to_s, 'spec', "fixtures/product_intelligence/#{schema}.json").to_s)
diff --git a/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb
index b10844320d0..07012914a4d 100644
--- a/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb
@@ -3,8 +3,8 @@
RSpec.shared_context 'project service activation' do
include_context 'integration activation'
- let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
before do
project.add_maintainer(user)
diff --git a/spec/support/shared_contexts/fixtures/analytics_shared_context.rb b/spec/support/shared_contexts/fixtures/analytics_shared_context.rb
new file mode 100644
index 00000000000..13d3697a378
--- /dev/null
+++ b/spec/support/shared_contexts/fixtures/analytics_shared_context.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'Analytics fixtures shared context' do
+ include JavaScriptFixturesHelpers
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, namespace: group) }
+ let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+
+ let(:issue) { create(:issue, project: project, created_at: 4.days.ago) }
+ let(:issue_1) { create(:issue, project: project, created_at: 5.days.ago) }
+ let(:issue_2) { create(:issue, project: project, created_at: 4.days.ago, milestone: milestone) }
+ let(:issue_3) { create(:issue, project: project, created_at: 3.days.ago, milestone: milestone) }
+
+ let(:mr_1) { create(:merge_request, source_project: project, allow_broken: true, created_at: 20.days.ago) }
+ let(:mr_2) { create(:merge_request, source_project: project, allow_broken: true, created_at: 19.days.ago) }
+
+ let(:pipeline_1) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr_1.source_branch, sha: mr_1.source_branch_sha, head_pipeline_of: mr_1) }
+ let(:pipeline_2) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr_2.source_branch, sha: mr_2.source_branch_sha, head_pipeline_of: mr_2) }
+
+ let(:build_1) { create(:ci_build, :success, pipeline: pipeline_1, author: user) }
+ let(:build_2) { create(:ci_build, :success, pipeline: pipeline_2, author: user) }
+
+ let(:params) { { created_after: 3.months.ago, created_before: Time.now, group_id: group.full_path } }
+
+ def prepare_cycle_analytics_data
+ group.add_maintainer(user)
+ project.add_maintainer(user)
+
+ create_commit_referencing_issue(issue_1)
+ create_commit_referencing_issue(issue_2)
+
+ create_merge_request_closing_issue(user, project, issue_1)
+ create_merge_request_closing_issue(user, project, issue_2)
+ merge_merge_requests_closing_issue(user, project, issue_3)
+ end
+
+ def create_deployment
+ deploy_master(user, project, environment: 'staging')
+ deploy_master(user, project)
+ end
+
+ def update_metrics
+ issue_1.metrics.update!(first_added_to_board_at: 3.days.ago, first_mentioned_in_commit_at: 2.days.ago)
+ issue_2.metrics.update!(first_added_to_board_at: 2.days.ago, first_mentioned_in_commit_at: 1.day.ago)
+
+ mr_1.metrics.update!({
+ merged_at: 5.days.ago,
+ first_deployed_to_production_at: 1.day.ago,
+ latest_build_started_at: 5.days.ago,
+ latest_build_finished_at: 1.day.ago,
+ pipeline: build_1.pipeline
+ })
+
+ mr_2.metrics.update!({
+ merged_at: 10.days.ago,
+ first_deployed_to_production_at: 5.days.ago,
+ latest_build_started_at: 9.days.ago,
+ latest_build_finished_at: 7.days.ago,
+ pipeline: build_2.pipeline
+ })
+ end
+
+ before do
+ stub_licensed_features(cycle_analytics_for_groups: true)
+
+ prepare_cycle_analytics_data
+ end
+end
diff --git a/spec/support/shared_contexts/graphql/requests/packages_shared_context.rb b/spec/support/shared_contexts/graphql/requests/packages_shared_context.rb
index 334b11c9f6e..645ea742f07 100644
--- a/spec/support/shared_contexts/graphql/requests/packages_shared_context.rb
+++ b/spec/support/shared_contexts/graphql/requests/packages_shared_context.rb
@@ -9,6 +9,7 @@ RSpec.shared_context 'package details setup' do
let(:depth) { 3 }
let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
let(:package_files) { all_graphql_fields_for('PackageFile') }
+ let(:dependency_links) { all_graphql_fields_for('PackageDependencyLink') }
let(:user) { project.owner }
let(:package_details) { graphql_data_at(:package) }
let(:metadata_response) { graphql_data_at(:package, :metadata) }
@@ -28,6 +29,11 @@ RSpec.shared_context 'package details setup' do
#{package_files}
}
}
+ dependencyLinks {
+ nodes {
+ #{dependency_links}
+ }
+ }
FIELDS
end
end
diff --git a/spec/support/shared_contexts/issuable/merge_request_shared_context.rb b/spec/support/shared_contexts/issuable/merge_request_shared_context.rb
index debcd9a3054..2c56411ca4c 100644
--- a/spec/support/shared_contexts/issuable/merge_request_shared_context.rb
+++ b/spec/support/shared_contexts/issuable/merge_request_shared_context.rb
@@ -11,6 +11,7 @@ RSpec.shared_context 'merge request show action' do
before do
allow(view).to receive(:experiment_enabled?).and_return(false)
allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:can_admin_project_member?)
assign(:project, project)
assign(:merge_request, merge_request)
assign(:note, note)
diff --git a/spec/support/shared_contexts/issuable/project_shared_context.rb b/spec/support/shared_contexts/issuable/project_shared_context.rb
index 5e5f6f2b7a6..b1bb9d80d78 100644
--- a/spec/support/shared_contexts/issuable/project_shared_context.rb
+++ b/spec/support/shared_contexts/issuable/project_shared_context.rb
@@ -12,5 +12,6 @@ RSpec.shared_context 'project show action' do
stub_template 'shared/issuable/_sidebar' => ''
stub_template 'projects/issues/_discussion' => ''
allow(view).to receive(:user_status).and_return('')
+ allow(view).to receive(:can_admin_project_member?)
end
end
diff --git a/spec/support/shared_contexts/lib/api/helpers/packages/dependency_proxy_helpers_shared_context.rb b/spec/support/shared_contexts/lib/api/helpers/packages/dependency_proxy_helpers_shared_context.rb
new file mode 100644
index 00000000000..7c8b6250d24
--- /dev/null
+++ b/spec/support/shared_contexts/lib/api/helpers/packages/dependency_proxy_helpers_shared_context.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'dependency proxy helpers context' do
+ def allow_fetch_application_setting(attribute:, return_value:)
+ attributes = double
+ allow(::Gitlab::CurrentSettings.current_application_settings).to receive(:attributes).and_return(attributes)
+ allow(attributes).to receive(:fetch).with(attribute, false).and_return(return_value)
+ end
+end
diff --git a/spec/support/shared_contexts/lib/gitlab/database/background_migration_job_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/database/background_migration_job_shared_context.rb
new file mode 100644
index 00000000000..382eb796f8e
--- /dev/null
+++ b/spec/support/shared_contexts/lib/gitlab/database/background_migration_job_shared_context.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'background migration job class' do
+ let!(:job_class_name) { 'TestJob' }
+ let!(:job_class) { Class.new }
+ let!(:job_perform_method) do
+ ->(*arguments) do
+ Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
+ # Value is 'TestJob' defined by :job_class_name in the let! above.
+ # Scoping prohibits us from directly referencing job_class_name.
+ RSpec.current_example.example_group_instance.job_class_name,
+ arguments
+ )
+ end
+ end
+
+ before do
+ job_class.define_method(:perform, job_perform_method)
+ expect(Gitlab::BackgroundMigration).to receive(:migration_class_for).with(job_class_name).at_least(:once) { job_class }
+ end
+end
diff --git a/spec/support/shared_contexts/load_balancing_configuration_shared_context.rb b/spec/support/shared_contexts/load_balancing_configuration_shared_context.rb
deleted file mode 100644
index a61b8e9a074..00000000000
--- a/spec/support/shared_contexts/load_balancing_configuration_shared_context.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_context 'clear DB Load Balancing configuration' do
- def clear_load_balancing_configuration
- proxy = ::Gitlab::Database::LoadBalancing.instance_variable_get(:@proxy)
- proxy.load_balancer.release_host if proxy
- ::Gitlab::Database::LoadBalancing.instance_variable_set(:@proxy, nil)
-
- ::Gitlab::Database::LoadBalancing::Session.clear_session
- end
-
- around do |example|
- clear_load_balancing_configuration
-
- example.run
-
- clear_load_balancing_configuration
- end
-end
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index b7eb03de8f0..8ae0885056e 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -176,6 +176,15 @@ RSpec.shared_context 'group navbar structure' do
}
end
+ let(:ci_cd_nav_item) do
+ {
+ nav_item: _('CI/CD'),
+ nav_sub_items: [
+ s_('Runners|Runners')
+ ]
+ }
+ end
+
let(:issues_nav_items) do
[
_('List'),
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index b0d7274269b..b432aa24bb8 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -38,12 +38,14 @@ RSpec.shared_context 'GroupPolicy context' do
delete_metrics_dashboard_annotation
update_metrics_dashboard_annotation
create_custom_emoji
+ create_package
create_package_settings
]
end
let(:maintainer_permissions) do
%i[
+ destroy_package
create_projects
read_cluster create_cluster update_cluster admin_cluster add_cluster
]
diff --git a/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb b/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb
index c69a987c00d..b90270356f8 100644
--- a/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb
+++ b/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb
@@ -11,7 +11,7 @@ RSpec.shared_context 'conan api setup' do
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let(:project) { package.project }
- let(:job) { create(:ci_build, :running, user: user, project: project) }
+ let(:job) { create(:ci_build, :running, user: user) }
let(:job_token) { job.token }
let(:auth_token) { personal_access_token.token }
let(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
diff --git a/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb b/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb
index c737091df48..815108be447 100644
--- a/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb
+++ b/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb
@@ -11,7 +11,7 @@ RSpec.shared_context 'npm api setup' do
let_it_be(:package, reload: true) { create(:npm_package, project: project, name: "@#{group.path}/scoped_package") }
let_it_be(:token) { create(:oauth_access_token, scopes: 'api', resource_owner: user) }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
- let_it_be(:job, reload: true) { create(:ci_build, user: user, status: :running, project: project) }
+ let_it_be(:job, reload: true) { create(:ci_build, user: user, status: :running) }
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
diff --git a/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb b/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
index ea72398010c..6b49a415889 100644
--- a/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
+++ b/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
@@ -6,20 +6,20 @@ RSpec.shared_context 'stubbed service ping metrics definitions' do
let(:metrics_definitions) { standard_metrics + subscription_metrics + operational_metrics + optional_metrics }
let(:standard_metrics) do
[
- metric_attributes('uuid', "Standard")
+ metric_attributes('uuid', "standard")
]
end
let(:operational_metrics) do
[
- metric_attributes('counts.merge_requests', "Operational"),
- metric_attributes('counts.todos', "Operational")
+ metric_attributes('counts.merge_requests', "operational"),
+ metric_attributes('counts.todos', "operational")
]
end
let(:optional_metrics) do
[
- metric_attributes('counts.boards', "Optional"),
+ metric_attributes('counts.boards', "optional"),
metric_attributes('gitaly.filesystems', '').except('data_category')
]
end
diff --git a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
index 9c8006ce4f1..cadc753513d 100644
--- a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
+++ b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
@@ -109,6 +109,18 @@ RSpec.shared_examples 'multiple issue boards' do
assert_boards_nav_active
end
+
+ it 'switches current board back' do
+ in_boards_switcher_dropdown do
+ click_link board.name
+ end
+
+ wait_for_requests
+
+ page.within('.boards-switcher') do
+ expect(page).to have_content(board.name)
+ end
+ end
end
context 'unauthorized user' do
diff --git a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
index 422282da4d8..a9c6da7bc2b 100644
--- a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
@@ -80,7 +80,6 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo.id)
expect(json_response.dig("provider_repos", 1, "id")).to eq(org_repo.id)
- expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
it "does not show already added project" do
@@ -156,7 +155,6 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
expect(json_response.dig("imported_projects").count).to eq(0)
expect(json_response.dig("provider_repos").count).to eq(1)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo_2.id)
- expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
it 'filters the list, ignoring the case of the name' do
@@ -166,7 +164,6 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
expect(json_response.dig("imported_projects").count).to eq(0)
expect(json_response.dig("provider_repos").count).to eq(1)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo_2.id)
- expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
context 'when user input contains html' do
diff --git a/spec/support/shared_examples/controllers/import_controller_status_shared_examples.rb b/spec/support/shared_examples/controllers/import_controller_status_shared_examples.rb
index ecb9abc5c46..b9ae0e23e26 100644
--- a/spec/support/shared_examples/controllers/import_controller_status_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/import_controller_status_shared_examples.rb
@@ -18,7 +18,6 @@ RSpec.shared_examples 'import controller status' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo_id)
- expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
it "does not show already added project" do
diff --git a/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb b/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
index 3f97c031e27..30914e61df0 100644
--- a/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec.shared_examples 'tracking unique visits' do |method|
+ include TrackingHelpers
+
let(:request_params) { {} }
it 'tracks unique visit if the format is HTML' do
@@ -14,14 +16,15 @@ RSpec.shared_examples 'tracking unique visits' do |method|
expect(Gitlab::UsageDataCounters::HLLRedisCounter)
.to receive(:track_event).with(target_id, values: kind_of(String))
- request.headers['DNT'] = '0'
+ stub_do_not_track('0')
get method, params: request_params, format: :html
end
it 'does not track unique visit if DNT is enabled' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
- request.headers['DNT'] = '1'
+
+ stub_do_not_track('1')
get method, params: request_params, format: :html
end
diff --git a/spec/support/shared_examples/features/dependency_proxy_shared_examples.rb b/spec/support/shared_examples/features/dependency_proxy_shared_examples.rb
new file mode 100644
index 00000000000..d29c677a962
--- /dev/null
+++ b/spec/support/shared_examples/features/dependency_proxy_shared_examples.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a successful blob pull' do
+ it 'sends a file' do
+ expect(controller).to receive(:send_file).with(blob.file.path, {})
+
+ subject
+ end
+
+ it 'returns Content-Disposition: attachment', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Disposition']).to match(/^attachment/)
+ end
+end
+
+RSpec.shared_examples 'a successful manifest pull' do
+ it 'sends a file' do
+ expect(controller).to receive(:send_file).with(manifest.file.path, type: manifest.content_type)
+
+ subject
+ end
+
+ it 'returns Content-Disposition: attachment', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Docker-Content-Digest']).to eq(manifest.digest)
+ expect(response.headers['Content-Length']).to eq(manifest.size)
+ expect(response.headers['Docker-Distribution-Api-Version']).to eq(DependencyProxy::DISTRIBUTION_API_VERSION)
+ expect(response.headers['Etag']).to eq("\"#{manifest.digest}\"")
+ expect(response.headers['Content-Disposition']).to match(/^attachment/)
+ end
+end
diff --git a/spec/support/shared_examples/features/discussion_comments_shared_example.rb b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
index ff2878f77b4..fb2e422559d 100644
--- a/spec/support/shared_examples/features/discussion_comments_shared_example.rb
+++ b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
@@ -308,7 +308,7 @@ RSpec.shared_examples 'thread comments for issue, epic and merge request' do |re
let(:reply_id) { find("#{comments_selector} .note:last-of-type", match: :first)['data-note-id'] }
it 'can be replied to after resolving' do
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
wait_for_requests
refresh
@@ -320,7 +320,7 @@ RSpec.shared_examples 'thread comments for issue, epic and merge request' do |re
it 'shows resolved thread when toggled' do
submit_reply('a')
- find('button[data-qa-selector="resolve_discussion_button"]').click
+ find('button[data-qa-selector="resolve_discussion_button"]').click # rubocop:disable QA/SelectorUsage
wait_for_requests
expect(page).to have_selector(".note-row-#{note_id}", visible: true)
diff --git a/spec/support/shared_examples/features/manage_applications_shared_examples.rb b/spec/support/shared_examples/features/manage_applications_shared_examples.rb
new file mode 100644
index 00000000000..38bb87eaed2
--- /dev/null
+++ b/spec/support/shared_examples/features/manage_applications_shared_examples.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'manage applications' do
+ let_it_be(:application_name) { 'application foo bar' }
+ let_it_be(:application_name_changed) { "#{application_name} changed" }
+ let_it_be(:application_redirect_uri) { 'https://foo.bar' }
+
+ it 'allows user to manage applications' do
+ visit new_application_path
+
+ expect(page).to have_content 'Add new application'
+
+ fill_in :doorkeeper_application_name, with: application_name
+ fill_in :doorkeeper_application_redirect_uri, with: application_redirect_uri
+ check :doorkeeper_application_scopes_read_user
+ click_on 'Save application'
+
+ validate_application(application_name, 'Yes')
+
+ application = Doorkeeper::Application.find_by(name: application_name)
+ expect(page).to have_css("button[title=\"Copy secret\"][data-clipboard-text=\"#{application.secret}\"]", text: 'Copy')
+
+ click_on 'Edit'
+
+ application_name_changed = "#{application_name} changed"
+
+ fill_in :doorkeeper_application_name, with: application_name_changed
+ uncheck :doorkeeper_application_confidential
+ click_on 'Save application'
+
+ validate_application(application_name_changed, 'No')
+
+ visit_applications_path
+
+ page.within '.oauth-applications' do
+ click_on 'Destroy'
+ end
+ expect(page.find('.oauth-applications')).not_to have_content 'test_changed'
+ end
+
+ context 'when scopes are blank' do
+ it 'returns an error' do
+ visit new_application_path
+
+ expect(page).to have_content 'Add new application'
+
+ fill_in :doorkeeper_application_name, with: application_name
+ fill_in :doorkeeper_application_redirect_uri, with: application_redirect_uri
+ click_on 'Save application'
+
+ expect(page).to have_content("Scopes can't be blank")
+ end
+ end
+
+ def visit_applications_path
+ visit defined?(applications_path) ? applications_path : new_application_path
+ end
+
+ def validate_application(name, confidential)
+ aggregate_failures do
+ expect(page).to have_content name
+ expect(page).to have_content 'Application ID'
+ expect(page).to have_content 'Secret'
+ expect(page).to have_content "Confidential #{confidential}"
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/packages_shared_examples.rb b/spec/support/shared_examples/features/packages_shared_examples.rb
index 9e88db2e1c0..96be30b9f1f 100644
--- a/spec/support/shared_examples/features/packages_shared_examples.rb
+++ b/spec/support/shared_examples/features/packages_shared_examples.rb
@@ -14,7 +14,7 @@ RSpec.shared_examples 'packages list' do |check_project_name: false|
end
def package_table_row(index)
- page.all("#{packages_table_selector} > [data-qa-selector=\"package_row\"]")[index].text
+ page.all("#{packages_table_selector} > [data-qa-selector=\"package_row\"]")[index].text # rubocop:disable QA/SelectorUsage
end
end
@@ -34,10 +34,8 @@ RSpec.shared_examples 'package details link' do |property|
expect(page).to have_css('.packages-app h1[data-testid="title"]', text: package.name)
- page.within(%Q([name="#{package.name}"])) do
- expect(page).to have_content('Installation')
- expect(page).to have_content('Registry setup')
- end
+ expect(page).to have_content('Installation')
+ expect(page).to have_content('Registry setup')
end
end
@@ -92,7 +90,7 @@ RSpec.shared_examples 'shared package sorting' do
end
def packages_table_selector
- '[data-qa-selector="packages-table"]'
+ '[data-qa-selector="packages-table"]' # rubocop:disable QA/SelectorUsage
end
def click_sort_option(option, ascending)
@@ -100,7 +98,7 @@ def click_sort_option(option, ascending)
# Reset the sort direction
click_button 'Sort direction' if page.has_selector?('svg[aria-label="Sorting Direction: Ascending"]', wait: 0)
- find('button.dropdown-menu-toggle').click
+ find('button.gl-dropdown-toggle').click
page.within('.dropdown-menu') do
click_button option
diff --git a/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb b/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
index 56154c7cd03..8212f14d6be 100644
--- a/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
+++ b/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
@@ -23,6 +23,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
end
click_on_protect
+ wait_for_requests
expect(ProtectedBranch.count).to eq(1)
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).to eq([access_type_id])
diff --git a/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb b/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb
index 28fe198c9c3..14142793a0d 100644
--- a/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb
+++ b/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb
@@ -23,7 +23,7 @@ RSpec.shared_examples 'Deploy keys with protected branches' do
find(".js-allowed-to-push").click
wait_for_requests
- within('.qa-allowed-to-push-dropdown') do
+ within('.qa-allowed-to-push-dropdown') do # rubocop:disable QA/SelectorUsage
dropdown_headers = page.all('.dropdown-header').map(&:text)
expect(dropdown_headers).to contain_exactly(*all_dropdown_sections)
@@ -38,7 +38,7 @@ RSpec.shared_examples 'Deploy keys with protected branches' do
find(".js-allowed-to-merge").click
wait_for_requests
- within('.qa-allowed-to-merge-dropdown') do
+ within('.qa-allowed-to-merge-dropdown') do # rubocop:disable QA/SelectorUsage
dropdown_headers = page.all('.dropdown-header').map(&:text)
expect(dropdown_headers).to contain_exactly(*dropdown_sections_minus_deploy_keys)
@@ -68,7 +68,7 @@ RSpec.shared_examples 'Deploy keys with protected branches' do
find(".js-allowed-to-push").click
wait_for_requests
- within('.qa-allowed-to-push-dropdown') do
+ within('.qa-allowed-to-push-dropdown') do # rubocop:disable QA/SelectorUsage
dropdown_headers = page.all('.dropdown-header').map(&:text)
expect(dropdown_headers).to contain_exactly(*dropdown_sections_minus_deploy_keys)
diff --git a/spec/support/shared_examples/features/rss_shared_examples.rb b/spec/support/shared_examples/features/rss_shared_examples.rb
index 1b0d3f9605a..c7c2aeea358 100644
--- a/spec/support/shared_examples/features/rss_shared_examples.rb
+++ b/spec/support/shared_examples/features/rss_shared_examples.rb
@@ -9,7 +9,7 @@ end
RSpec.shared_examples "it has an RSS button with current_user's feed token" do
it "shows the RSS button with current_user's feed token" do
expect(page)
- .to have_css("a:has(.qa-rss-icon)[href*='feed_token=#{user.feed_token}']")
+ .to have_css("a:has(.qa-rss-icon)[href*='feed_token=#{user.feed_token}']") # rubocop:disable QA/SelectorUsage
end
end
@@ -22,6 +22,6 @@ end
RSpec.shared_examples "it has an RSS button without a feed token" do
it "shows the RSS button without a feed token" do
expect(page)
- .to have_css("a:has(.qa-rss-icon):not([href*='feed_token'])")
+ .to have_css("a:has(.qa-rss-icon):not([href*='feed_token'])") # rubocop:disable QA/SelectorUsage
end
end
diff --git a/spec/support/shared_examples/features/variable_list_shared_examples.rb b/spec/support/shared_examples/features/variable_list_shared_examples.rb
index 997500415a9..52451839281 100644
--- a/spec/support/shared_examples/features/variable_list_shared_examples.rb
+++ b/spec/support/shared_examples/features/variable_list_shared_examples.rb
@@ -91,7 +91,7 @@ RSpec.shared_examples 'variable list' do
end
page.within('#add-ci-variable') do
- find('[data-qa-selector="ci_variable_key_field"] input').set('new_key')
+ find('[data-qa-selector="ci_variable_key_field"] input').set('new_key') # rubocop:disable QA/SelectorUsage
click_button('Update variable')
end
@@ -173,7 +173,7 @@ RSpec.shared_examples 'variable list' do
click_button('Add variable')
page.within('#add-ci-variable') do
- find('[data-qa-selector="ci_variable_key_field"] input').set('empty_mask_key')
+ find('[data-qa-selector="ci_variable_key_field"] input').set('empty_mask_key') # rubocop:disable QA/SelectorUsage
find('[data-testid="ci-variable-protected-checkbox"]').click
find('[data-testid="ci-variable-masked-checkbox"]').click
@@ -286,8 +286,8 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
page.within('#add-ci-variable') do
- find('[data-qa-selector="ci_variable_key_field"] input').set(key)
- find('[data-qa-selector="ci_variable_value_field"]').set(value) if value.present?
+ find('[data-qa-selector="ci_variable_key_field"] input').set(key) # rubocop:disable QA/SelectorUsage
+ find('[data-qa-selector="ci_variable_value_field"]').set(value) if value.present? # rubocop:disable QA/SelectorUsage
find('[data-testid="ci-variable-protected-checkbox"]').click if protected
find('[data-testid="ci-variable-masked-checkbox"]').click if masked
diff --git a/spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb
index 3b2fda4e05b..6fdc5ecae73 100644
--- a/spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.shared_examples 'User views AsciiDoc page with includes' do
- let_it_be(:wiki_content_selector) { '[data-qa-selector=wiki_page_content]' }
+ let_it_be(:wiki_content_selector) { '[data-qa-selector=wiki_page_content]' } # rubocop:disable QA/SelectorUsage
let!(:included_wiki_page) { create_wiki_page('included_page', content: 'Content from the included page')}
let!(:wiki_page) { create_wiki_page('home', content: "Content from the main page.\ninclude::included_page.asciidoc[]") }
diff --git a/spec/support/shared_examples/finders/security/jobs_finder_shared_examples.rb b/spec/support/shared_examples/finders/security/jobs_finder_shared_examples.rb
index a332b213866..117b35201f6 100644
--- a/spec/support/shared_examples/finders/security/jobs_finder_shared_examples.rb
+++ b/spec/support/shared_examples/finders/security/jobs_finder_shared_examples.rb
@@ -68,20 +68,6 @@ RSpec.shared_examples ::Security::JobsFinder do |default_job_types|
end
end
- context 'when using legacy CI build metadata config storage' do
- before do
- stub_feature_flags(ci_build_metadata_config: false)
- end
-
- it_behaves_like 'JobsFinder core functionality'
- end
-
- context 'when using the new CI build metadata config storage' do
- before do
- stub_feature_flags(ci_build_metadata_config: true)
- end
-
- it_behaves_like 'JobsFinder core functionality'
- end
+ it_behaves_like 'JobsFinder core functionality'
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/ci/reports/security/locations/locations_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/reports/security/locations/locations_shared_examples.rb
new file mode 100644
index 00000000000..3aa04a77e57
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/ci/reports/security/locations/locations_shared_examples.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'vulnerability location' do
+ describe '#initialize' do
+ subject { described_class.new(**params) }
+
+ context 'when all params are given' do
+ it 'initializes an instance' do
+ expect { subject }.not_to raise_error
+
+ expect(subject).to have_attributes(**params)
+ end
+ end
+
+ where(:param) do
+ mandatory_params
+ end
+
+ with_them do
+ context "when param #{params[:param]} is missing" do
+ before do
+ params.delete(param)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+ end
+ end
+
+ describe '#fingerprint' do
+ subject { described_class.new(**params).fingerprint }
+
+ it "generates expected fingerprint" do
+ expect(subject).to eq(expected_fingerprint)
+ end
+ end
+
+ describe '#fingerprint_path' do
+ subject { described_class.new(**params).fingerprint_path }
+
+ it "generates expected fingerprint path" do
+ expect(subject).to eq(expected_fingerprint_path)
+ end
+ end
+
+ describe '#==' do
+ let(:location_1) { create(:ci_reports_security_locations_sast) }
+ let(:location_2) { create(:ci_reports_security_locations_sast) }
+
+ subject { location_1 == location_2 }
+
+ it "returns true when fingerprints are equal" do
+ allow(location_1).to receive(:fingerprint).and_return('fingerprint')
+ allow(location_2).to receive(:fingerprint).and_return('fingerprint')
+
+ expect(subject).to eq(true)
+ end
+
+ it "returns false when fingerprints are different" do
+ allow(location_1).to receive(:fingerprint).and_return('fingerprint')
+ allow(location_2).to receive(:fingerprint).and_return('another_fingerprint')
+
+ expect(subject).to eq(false)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb
index 20f3270526e..7888ade56eb 100644
--- a/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb
@@ -21,3 +21,46 @@ RSpec.shared_examples 'marks background migration job records' do
expect(jobs_updated).to eq(1)
end
end
+
+RSpec.shared_examples 'finalized background migration' do
+ it 'processed the scheduled sidekiq queue' do
+ queued = Sidekiq::ScheduledSet
+ .new
+ .select do |scheduled|
+ scheduled.klass == 'BackgroundMigrationWorker' &&
+ scheduled.args.first == job_class_name
+ end
+ expect(queued.size).to eq(0)
+ end
+
+ it 'processed the async sidekiq queue' do
+ queued = Sidekiq::Queue.new('BackgroundMigrationWorker')
+ .select { |scheduled| scheduled.klass == job_class_name }
+ expect(queued.size).to eq(0)
+ end
+
+ include_examples 'removed tracked jobs', 'pending'
+end
+
+RSpec.shared_examples 'finalized tracked background migration' do
+ include_examples 'finalized background migration'
+ include_examples 'removed tracked jobs', 'succeeded'
+end
+
+RSpec.shared_examples 'removed tracked jobs' do |status|
+ it "removes '#{status}' tracked jobs" do
+ jobs = Gitlab::Database::BackgroundMigrationJob
+ .where(status: Gitlab::Database::BackgroundMigrationJob.statuses[status])
+ .for_migration_class(job_class_name)
+ expect(jobs).to be_empty
+ end
+end
+
+RSpec.shared_examples 'retained tracked jobs' do |status|
+ it "retains '#{status}' tracked jobs" do
+ jobs = Gitlab::Database::BackgroundMigrationJob
+ .where(status: Gitlab::Database::BackgroundMigrationJob.statuses[status])
+ .for_migration_class(job_class_name)
+ expect(jobs).to be_present
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb
index 88e6ffd15a8..a617342ff8c 100644
--- a/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb
@@ -11,7 +11,7 @@ RSpec.shared_examples 'CTE with MATERIALIZED keyword examples' do
context 'when PG version is <12' do
it 'does not add MATERIALIZE keyword' do
- allow(Gitlab::Database).to receive(:version).and_return('11.1')
+ allow(Gitlab::Database.main).to receive(:version).and_return('11.1')
expect(query).to include(expected_query_block_without_materialized)
end
@@ -19,14 +19,14 @@ RSpec.shared_examples 'CTE with MATERIALIZED keyword examples' do
context 'when PG version is >=12' do
it 'adds MATERIALIZE keyword' do
- allow(Gitlab::Database).to receive(:version).and_return('12.1')
+ allow(Gitlab::Database.main).to receive(:version).and_return('12.1')
expect(query).to include(expected_query_block_with_materialized)
end
context 'when version is higher than 12' do
it 'adds MATERIALIZE keyword' do
- allow(Gitlab::Database).to receive(:version).and_return('15.1')
+ allow(Gitlab::Database.main).to receive(:version).and_return('15.1')
expect(query).to include(expected_query_block_with_materialized)
end
diff --git a/spec/support/shared_examples/lib/gitlab/migration_helpers_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/migration_helpers_shared_examples.rb
index 72d672fd36c..69a1f7ad11e 100644
--- a/spec/support/shared_examples/lib/gitlab/migration_helpers_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/migration_helpers_shared_examples.rb
@@ -14,10 +14,10 @@ RSpec.shared_examples 'performs validation' do |validation_option|
it 'performs validation' do
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
model.add_concurrent_foreign_key(*args, **options.merge(validation_option))
end
diff --git a/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb
index 8d758ed1655..ead8b174d46 100644
--- a/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb
@@ -22,7 +22,7 @@ RSpec.shared_examples 'SQL set operator' do |operator_keyword|
end
it 'skips Model.none segments' do
- empty_relation = User.none
+ empty_relation = User.none.select(:id)
set_operator = described_class.new([empty_relation, relation_1, relation_2])
expect {User.where("users.id IN (#{set_operator.to_sql})").to_a}.not_to raise_error
@@ -44,6 +44,17 @@ RSpec.shared_examples 'SQL set operator' do |operator_keyword|
end
end
+ context 'when uneven select values are used' do
+ let(:relation_1) { User.where(email: 'alice@example.com').select(*User.column_names) }
+ let(:relation_2) { User.where(email: 'bob@example.com') }
+
+ it 'raises error' do
+ expect do
+ described_class.new([relation_1, relation_2])
+ end.to raise_error /Relations with uneven select values were passed/
+ end
+ end
+
describe 'remove_order parameter' do
let(:scopes) do
[
diff --git a/spec/support/shared_examples/helpers/groups_shared_examples.rb b/spec/support/shared_examples/lib/menus_shared_examples.rb
index 9c74d25b31f..2c2cb362b07 100644
--- a/spec/support/shared_examples/helpers/groups_shared_examples.rb
+++ b/spec/support/shared_examples/lib/menus_shared_examples.rb
@@ -1,30 +1,16 @@
# frozen_string_literal: true
-# This shared_example requires the following variables:
-# - current_user
-# - group
-# - type, the issuable type (ie :issues, :merge_requests)
-# - count_service, the Service used by the specified issuable type
-
-RSpec.shared_examples 'cached issuables count' do
- subject { helper.cached_issuables_count(group, type: type) }
-
- before do
- allow(helper).to receive(:current_user) { current_user }
- allow(count_service).to receive(:new).and_call_original
- end
+RSpec.shared_examples_for 'pill_count formatted results' do
+ let(:count_service) { raise NotImplementedError }
- it 'calls the correct service class' do
- subject
- expect(count_service).to have_received(:new).with(group, current_user)
- end
+ subject(:pill_count) { menu.pill_count }
it 'returns all digits for count value under 1000' do
allow_next_instance_of(count_service) do |service|
allow(service).to receive(:count).and_return(999)
end
- expect(subject).to eq('999')
+ expect(pill_count).to eq('999')
end
it 'returns truncated digits for count value over 1000' do
@@ -32,7 +18,7 @@ RSpec.shared_examples 'cached issuables count' do
allow(service).to receive(:count).and_return(2300)
end
- expect(subject).to eq('2.3k')
+ expect(pill_count).to eq('2.3k')
end
it 'returns truncated digits for count value over 10000' do
@@ -40,7 +26,7 @@ RSpec.shared_examples 'cached issuables count' do
allow(service).to receive(:count).and_return(12560)
end
- expect(subject).to eq('12.6k')
+ expect(pill_count).to eq('12.6k')
end
it 'returns truncated digits for count value over 100000' do
@@ -48,6 +34,6 @@ RSpec.shared_examples 'cached issuables count' do
allow(service).to receive(:count).and_return(112560)
end
- expect(subject).to eq('112.6k')
+ expect(pill_count).to eq('112.6k')
end
end
diff --git a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
index a84658780b9..c6d6ff6bc1d 100644
--- a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
+++ b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
@@ -1,106 +1,126 @@
# frozen_string_literal: true
RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
+ let(:db_config_name) { ::Gitlab::Database.db_config_names.first }
+
+ let(:expected_payload_defaults) do
+ metrics =
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_counter_keys +
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_duration_keys +
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_keys
+
+ metrics.each_with_object({}) do |key, result|
+ result[key] = 0
+ end
+ end
+
+ def transform_hash(hash, another_hash)
+ another_hash.each do |key, value|
+ raise "Unexpected key: #{key}" unless hash[key]
+ end
+
+ hash.merge(another_hash)
+ end
+
it 'prevents db counters from leaking to the next transaction' do
2.times do
Gitlab::WithRequestStore.with_request_store do
subscriber.sql(event)
- connection = event.payload[:connection]
-
- if db_role == :primary
- expected = {
- db_count: record_query ? 1 : 0,
- db_write_count: record_write_query ? 1 : 0,
- db_cached_count: record_cached_query ? 1 : 0,
- db_primary_cached_count: record_cached_query ? 1 : 0,
- db_primary_count: record_query ? 1 : 0,
- db_primary_duration_s: record_query ? 0.002 : 0,
- db_replica_cached_count: 0,
- db_replica_count: 0,
- db_replica_duration_s: 0.0,
- db_primary_wal_count: record_wal_query ? 1 : 0,
- db_primary_wal_cached_count: record_wal_query && record_cached_query ? 1 : 0,
- db_replica_wal_cached_count: 0,
- db_replica_wal_count: 0
- }
- expected[:"db_primary_#{::Gitlab::Database.dbname(connection)}_duration_s"] = 0.002 if record_query
- elsif db_role == :replica
- expected = {
- db_count: record_query ? 1 : 0,
- db_write_count: record_write_query ? 1 : 0,
- db_cached_count: record_cached_query ? 1 : 0,
- db_primary_cached_count: 0,
- db_primary_count: 0,
- db_primary_duration_s: 0.0,
- db_replica_cached_count: record_cached_query ? 1 : 0,
- db_replica_count: record_query ? 1 : 0,
- db_replica_duration_s: record_query ? 0.002 : 0,
- db_replica_wal_count: record_wal_query ? 1 : 0,
- db_replica_wal_cached_count: record_wal_query && record_cached_query ? 1 : 0,
- db_primary_wal_cached_count: 0,
- db_primary_wal_count: 0
- }
- expected[:"db_replica_#{::Gitlab::Database.dbname(connection)}_duration_s"] = 0.002 if record_query
- else
- expected = {
- db_count: record_query ? 1 : 0,
- db_write_count: record_write_query ? 1 : 0,
- db_cached_count: record_cached_query ? 1 : 0
- }
- end
+
+ expected = if db_role == :primary
+ transform_hash(expected_payload_defaults, {
+ db_count: record_query ? 1 : 0,
+ db_write_count: record_write_query ? 1 : 0,
+ db_cached_count: record_cached_query ? 1 : 0,
+ db_primary_cached_count: record_cached_query ? 1 : 0,
+ "db_primary_#{db_config_name}_cached_count": record_cached_query ? 1 : 0,
+ db_primary_count: record_query ? 1 : 0,
+ "db_primary_#{db_config_name}_count": record_query ? 1 : 0,
+ db_primary_duration_s: record_query ? 0.002 : 0,
+ "db_primary_#{db_config_name}_duration_s": record_query ? 0.002 : 0,
+ db_primary_wal_count: record_wal_query ? 1 : 0,
+ "db_primary_#{db_config_name}_wal_count": record_wal_query ? 1 : 0,
+ db_primary_wal_cached_count: record_wal_query && record_cached_query ? 1 : 0,
+ "db_primary_#{db_config_name}_wal_cached_count": record_wal_query && record_cached_query ? 1 : 0
+ })
+ elsif db_role == :replica
+ transform_hash(expected_payload_defaults, {
+ db_count: record_query ? 1 : 0,
+ db_write_count: record_write_query ? 1 : 0,
+ db_cached_count: record_cached_query ? 1 : 0,
+ db_replica_cached_count: record_cached_query ? 1 : 0,
+ "db_replica_#{db_config_name}_cached_count": record_cached_query ? 1 : 0,
+ db_replica_count: record_query ? 1 : 0,
+ "db_replica_#{db_config_name}_count": record_query ? 1 : 0,
+ db_replica_duration_s: record_query ? 0.002 : 0,
+ "db_replica_#{db_config_name}_duration_s": record_query ? 0.002 : 0,
+ db_replica_wal_count: record_wal_query ? 1 : 0,
+ "db_replica_#{db_config_name}_wal_count": record_wal_query ? 1 : 0,
+ db_replica_wal_cached_count: record_wal_query && record_cached_query ? 1 : 0,
+ "db_replica_#{db_config_name}_wal_cached_count": record_wal_query && record_cached_query ? 1 : 0
+ })
+ else
+ {
+ db_count: record_query ? 1 : 0,
+ db_write_count: record_write_query ? 1 : 0,
+ db_cached_count: record_cached_query ? 1 : 0
+ }
+ end
expect(described_class.db_counter_payload).to eq(expected)
end
end
end
- context 'when multiple_database_metrics is disabled' do
+ context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
before do
- stub_feature_flags(multiple_database_metrics: false)
+ stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
end
it 'does not include per database metrics' do
Gitlab::WithRequestStore.with_request_store do
subscriber.sql(event)
- connection = event.payload[:connection]
- expect(described_class.db_counter_payload).not_to include(:"db_replica_#{::Gitlab::Database.dbname(connection)}_duration_s")
+ expect(described_class.db_counter_payload).not_to include(:"db_replica_#{db_config_name}_duration_s")
+ expect(described_class.db_counter_payload).not_to include(:"db_replica_#{db_config_name}_count")
end
end
end
end
RSpec.shared_examples 'record ActiveRecord metrics in a metrics transaction' do |db_role|
+ let(:db_config_name) { ::Gitlab::Database.db_config_name(ApplicationRecord.connection) }
+
it 'increments only db counters' do
if record_query
- expect(transaction).to receive(:increment).with(:gitlab_transaction_db_count_total, 1)
- expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_count_total".to_sym, 1) if db_role
+ expect(transaction).to receive(:increment).with(:gitlab_transaction_db_count_total, 1, { db_config_name: db_config_name })
+ expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_count_total".to_sym, 1, { db_config_name: db_config_name }) if db_role
else
- expect(transaction).not_to receive(:increment).with(:gitlab_transaction_db_count_total, 1)
- expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_#{db_role}_count_total".to_sym, 1) if db_role
+ expect(transaction).not_to receive(:increment).with(:gitlab_transaction_db_count_total, 1, { db_config_name: db_config_name })
+ expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_#{db_role}_count_total".to_sym, 1, { db_config_name: db_config_name }) if db_role
end
if record_write_query
- expect(transaction).to receive(:increment).with(:gitlab_transaction_db_write_count_total, 1)
+ expect(transaction).to receive(:increment).with(:gitlab_transaction_db_write_count_total, 1, { db_config_name: db_config_name })
else
- expect(transaction).not_to receive(:increment).with(:gitlab_transaction_db_write_count_total, 1)
+ expect(transaction).not_to receive(:increment).with(:gitlab_transaction_db_write_count_total, 1, { db_config_name: db_config_name })
end
if record_cached_query
- expect(transaction).to receive(:increment).with(:gitlab_transaction_db_cached_count_total, 1)
- expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_cached_count_total".to_sym, 1) if db_role
+ expect(transaction).to receive(:increment).with(:gitlab_transaction_db_cached_count_total, 1, { db_config_name: db_config_name })
+ expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_cached_count_total".to_sym, 1, { db_config_name: db_config_name }) if db_role
else
- expect(transaction).not_to receive(:increment).with(:gitlab_transaction_db_cached_count_total, 1)
- expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_#{db_role}_cached_count_total".to_sym, 1) if db_role
+ expect(transaction).not_to receive(:increment).with(:gitlab_transaction_db_cached_count_total, 1, { db_config_name: db_config_name })
+ expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_#{db_role}_cached_count_total".to_sym, 1, { db_config_name: db_config_name }) if db_role
end
if record_wal_query
if db_role
- expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_count_total".to_sym, 1)
- expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_cached_count_total".to_sym, 1) if record_cached_query
+ expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_count_total".to_sym, 1, { db_config_name: db_config_name })
+ expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_cached_count_total".to_sym, 1, { db_config_name: db_config_name }) if record_cached_query
end
else
- expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_count_total".to_sym, 1) if db_role
+ expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_count_total".to_sym, 1, { db_config_name: db_config_name }) if db_role
end
subscriber.sql(event)
@@ -108,14 +128,34 @@ RSpec.shared_examples 'record ActiveRecord metrics in a metrics transaction' do
it 'observes sql_duration metric' do
if record_query
- expect(transaction).to receive(:observe).with(:gitlab_sql_duration_seconds, 0.002)
- expect(transaction).to receive(:observe).with("gitlab_sql_#{db_role}_duration_seconds".to_sym, 0.002) if db_role
+ expect(transaction).to receive(:observe).with(:gitlab_sql_duration_seconds, 0.002, { db_config_name: db_config_name })
+ expect(transaction).to receive(:observe).with("gitlab_sql_#{db_role}_duration_seconds".to_sym, 0.002, { db_config_name: db_config_name }) if db_role
else
expect(transaction).not_to receive(:observe)
end
subscriber.sql(event)
end
+
+ context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
+ before do
+ stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
+ end
+
+ it 'does not include db_config_name label' do
+ allow(transaction).to receive(:increment) do |*args|
+ labels = args[2] || {}
+ expect(labels).not_to include(:db_config_name)
+ end
+
+ allow(transaction).to receive(:observe) do |*args|
+ labels = args[2] || {}
+ expect(labels).not_to include(:db_config_name)
+ end
+
+ subscriber.sql(event)
+ end
+ end
end
RSpec.shared_examples 'record ActiveRecord metrics' do |db_role|
diff --git a/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb b/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
index 99a09993900..f92ed3d7396 100644
--- a/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
@@ -62,26 +62,6 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
.to raise_error(ActiveModel::MissingAttributeError)
end
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(efficient_counter_attribute: false)
- end
-
- it 'delegates to ActiveRecord update!' do
- expect { subject }
- .to change { model.reset.read_attribute(attribute) }.by(increment)
- end
-
- it 'does not increment the counter in Redis' do
- subject
-
- Gitlab::Redis::SharedState.with do |redis|
- counter = redis.get(model.counter_key(attribute))
- expect(counter).to be_nil
- end
- end
- end
end
end
end
diff --git a/spec/support/shared_examples/models/concerns/incident_management/escalatable_shared_examples.rb b/spec/support/shared_examples/models/concerns/incident_management/escalatable_shared_examples.rb
new file mode 100644
index 00000000000..7b33a95bfa1
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/incident_management/escalatable_shared_examples.rb
@@ -0,0 +1,246 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a model including Escalatable' do
+ # rubocop:disable Rails/SaveBang -- Usage of factory symbol as argument causes a false-positive
+ let_it_be(:escalatable_factory) { factory_from_class(described_class) }
+ let_it_be(:triggered_escalatable, reload: true) { create(escalatable_factory, :triggered) }
+ let_it_be(:acknowledged_escalatable, reload: true) { create(escalatable_factory, :acknowledged) }
+ let_it_be(:resolved_escalatable, reload: true) { create(escalatable_factory, :resolved) }
+ let_it_be(:ignored_escalatable, reload: true) { create(escalatable_factory, :ignored) }
+
+ context 'validations' do
+ it { is_expected.to validate_presence_of(:status) }
+
+ context 'when status is triggered' do
+ subject { triggered_escalatable }
+
+ context 'when resolved_at is blank' do
+ it { is_expected.to be_valid }
+ end
+
+ context 'when resolved_at is present' do
+ before do
+ triggered_escalatable.resolved_at = Time.current
+ end
+
+ it { is_expected.to be_invalid }
+ end
+ end
+
+ context 'when status is acknowledged' do
+ subject { acknowledged_escalatable }
+
+ context 'when resolved_at is blank' do
+ it { is_expected.to be_valid }
+ end
+
+ context 'when resolved_at is present' do
+ before do
+ acknowledged_escalatable.resolved_at = Time.current
+ end
+
+ it { is_expected.to be_invalid }
+ end
+ end
+
+ context 'when status is resolved' do
+ subject { resolved_escalatable }
+
+ context 'when resolved_at is blank' do
+ before do
+ resolved_escalatable.resolved_at = nil
+ end
+
+ it { is_expected.to be_invalid }
+ end
+
+ context 'when resolved_at is present' do
+ it { is_expected.to be_valid }
+ end
+ end
+
+ context 'when status is ignored' do
+ subject { ignored_escalatable }
+
+ context 'when resolved_at is blank' do
+ it { is_expected.to be_valid }
+ end
+
+ context 'when resolved_at is present' do
+ before do
+ ignored_escalatable.resolved_at = Time.current
+ end
+
+ it { is_expected.to be_invalid }
+ end
+ end
+ end
+
+ context 'scopes' do
+ let(:all_escalatables) { described_class.where(id: [triggered_escalatable, acknowledged_escalatable, ignored_escalatable, resolved_escalatable])}
+
+ describe '.order_status' do
+ subject { all_escalatables.order_status(order) }
+
+ context 'descending' do
+ let(:order) { :desc }
+
+ # Downward arrow in UI always corresponds to default sort
+ it { is_expected.to eq([triggered_escalatable, acknowledged_escalatable, resolved_escalatable, ignored_escalatable]) }
+ end
+
+ context 'ascending' do
+ let(:order) { :asc }
+
+ it { is_expected.to eq([ignored_escalatable, resolved_escalatable, acknowledged_escalatable, triggered_escalatable]) }
+ end
+ end
+ end
+
+ describe '.status_value' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :status_value) do
+ :triggered | 0
+ :acknowledged | 1
+ :resolved | 2
+ :ignored | 3
+ :unknown | nil
+ end
+
+ with_them do
+ it 'returns status value by its name' do
+ expect(described_class.status_value(status)).to eq(status_value)
+ end
+ end
+ end
+
+ describe '.status_name' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:raw_status, :status) do
+ 0 | :triggered
+ 1 | :acknowledged
+ 2 | :resolved
+ 3 | :ignored
+ -1 | nil
+ end
+
+ with_them do
+ it 'returns status name by its values' do
+ expect(described_class.status_name(raw_status)).to eq(status)
+ end
+ end
+ end
+
+ describe '#trigger' do
+ subject { escalatable.trigger }
+
+ context 'when escalatable is in triggered state' do
+ let(:escalatable) { triggered_escalatable }
+
+ it 'does not change the escalatable status' do
+ expect { subject }.not_to change { escalatable.reload.status }
+ end
+ end
+
+ context 'when escalatable is not in triggered state' do
+ let(:escalatable) { resolved_escalatable }
+
+ it 'changes the escalatable status to triggered' do
+ expect { subject }.to change { escalatable.triggered? }.to(true)
+ end
+
+ it 'resets resolved at' do
+ expect { subject }.to change { escalatable.reload.resolved_at }.to nil
+ end
+ end
+ end
+
+ describe '#acknowledge' do
+ subject { escalatable.acknowledge }
+
+ let(:escalatable) { resolved_escalatable }
+
+ it 'changes the escalatable status to acknowledged' do
+ expect { subject }.to change { escalatable.acknowledged? }.to(true)
+ end
+
+ it 'resets ended at' do
+ expect { subject }.to change { escalatable.reload.resolved_at }.to nil
+ end
+ end
+
+ describe '#resolve' do
+ let!(:resolved_at) { Time.current }
+
+ subject do
+ escalatable.resolved_at = resolved_at
+ escalatable.resolve
+ end
+
+ context 'when escalatable is already resolved' do
+ let(:escalatable) { resolved_escalatable }
+
+ it 'does not change the escalatable status' do
+ expect { subject }.not_to change { resolved_escalatable.reload.status }
+ end
+ end
+
+ context 'when escalatable is not resolved' do
+ let(:escalatable) { triggered_escalatable }
+
+ it 'changes escalatable status to "resolved"' do
+ expect { subject }.to change { escalatable.resolved? }.to(true)
+ end
+ end
+ end
+
+ describe '#ignore' do
+ subject { escalatable.ignore }
+
+ let(:escalatable) { resolved_escalatable }
+
+ it 'changes the escalatable status to ignored' do
+ expect { subject }.to change { escalatable.ignored? }.to(true)
+ end
+
+ it 'resets ended at' do
+ expect { subject }.to change { escalatable.reload.resolved_at }.to nil
+ end
+ end
+
+ describe '#status_event_for' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:for_status, :event) do
+ :triggered | :trigger
+ 'triggered' | :trigger
+ :acknowledged | :acknowledge
+ 'acknowledged' | :acknowledge
+ :resolved | :resolve
+ 'resolved' | :resolve
+ :ignored | :ignore
+ 'ignored' | :ignore
+ :unknown | nil
+ nil | nil
+ '' | nil
+ 1 | nil
+ end
+
+ with_them do
+ let(:escalatable) { build(escalatable_factory) }
+
+ it 'returns event by status name' do
+ expect(escalatable.status_event_for(for_status)).to eq(event)
+ end
+ end
+ end
+
+ private
+
+ def factory_from_class(klass)
+ klass.name.underscore.tr('/', '_')
+ end
+end
+# rubocop:enable Rails/SaveBang
diff --git a/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
index cf38a583944..457ee49938f 100644
--- a/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
+++ b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
@@ -13,6 +13,7 @@ RSpec.shared_examples 'value stream analytics stage' do
describe 'associations' do
it { is_expected.to belong_to(:end_event_label) }
it { is_expected.to belong_to(:start_event_label) }
+ it { is_expected.to belong_to(:stage_event_hash) }
end
describe 'validation' do
@@ -138,6 +139,67 @@ RSpec.shared_examples 'value stream analytics stage' do
expect(stage_1.events_hash_code).not_to eq(stage_2.events_hash_code)
end
end
+
+ # rubocop: disable Rails/SaveBang
+ describe '#event_hash' do
+ it 'associates the same stage event hash record' do
+ first = create(factory)
+ second = create(factory)
+
+ expect(first.stage_event_hash_id).to eq(second.stage_event_hash_id)
+ end
+
+ it 'does not introduce duplicated stage event hash records' do
+ expect do
+ create(factory)
+ create(factory)
+ end.to change { Analytics::CycleAnalytics::StageEventHash.count }.from(0).to(1)
+ end
+
+ it 'creates different hash record for different event configurations' do
+ expect do
+ create(factory, start_event_identifier: :issue_created, end_event_identifier: :issue_stage_end)
+ create(factory, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged)
+ end.to change { Analytics::CycleAnalytics::StageEventHash.count }.from(0).to(2)
+ end
+
+ context 'when the stage event hash changes' do
+ let(:stage) { create(factory, start_event_identifier: :issue_created, end_event_identifier: :issue_stage_end) }
+
+ it 'deletes the old, unused stage event hash record' do
+ old_stage_event_hash = stage.stage_event_hash
+
+ stage.update!(end_event_identifier: :issue_deployed_to_production)
+
+ expect(stage.stage_event_hash_id).not_to eq(old_stage_event_hash.id)
+
+ old_stage_event_hash_from_db = Analytics::CycleAnalytics::StageEventHash.find_by_id(old_stage_event_hash.id)
+ expect(old_stage_event_hash_from_db).to be_nil
+ end
+
+ it 'does not delete used stage event hash record' do
+ other_stage = create(factory, start_event_identifier: :issue_created, end_event_identifier: :issue_stage_end)
+
+ stage.update!(end_event_identifier: :issue_deployed_to_production)
+
+ expect(stage.stage_event_hash_id).not_to eq(other_stage.stage_event_hash_id)
+
+ old_stage_event_hash_from_db = Analytics::CycleAnalytics::StageEventHash.find_by_id(other_stage.stage_event_hash_id)
+ expect(old_stage_event_hash_from_db).not_to be_nil
+ end
+ end
+
+ context 'when the stage events hash code does not change' do
+ it 'does not trigger extra query on save' do
+ stage = create(factory, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged)
+
+ expect(Analytics::CycleAnalytics::StageEventHash).not_to receive(:record_id_by_hash_sha256)
+
+ stage.update!(name: 'new title')
+ end
+ end
+ end
+ # rubocop: enable Rails/SaveBang
end
RSpec.shared_examples 'value stream analytics label based stage' do
diff --git a/spec/support/shared_examples/models/mentionable_shared_examples.rb b/spec/support/shared_examples/models/mentionable_shared_examples.rb
index 04630484964..07c5f730e95 100644
--- a/spec/support/shared_examples/models/mentionable_shared_examples.rb
+++ b/spec/support/shared_examples/models/mentionable_shared_examples.rb
@@ -207,7 +207,7 @@ RSpec.shared_examples 'an editable mentionable' do
end
RSpec.shared_examples 'mentions in description' do |mentionable_type|
- describe 'when storing user mentions' do
+ shared_examples 'when storing user mentions' do
before do
mentionable.store_mentions!
end
@@ -238,10 +238,26 @@ RSpec.shared_examples 'mentions in description' do |mentionable_type|
end
end
end
+
+ context 'when store_mentions_without_subtransaction is enabled' do
+ before do
+ stub_feature_flags(store_mentions_without_subtransaction: true)
+ end
+
+ it_behaves_like 'when storing user mentions'
+ end
+
+ context 'when store_mentions_without_subtransaction is disabled' do
+ before do
+ stub_feature_flags(store_mentions_without_subtransaction: false)
+ end
+
+ it_behaves_like 'when storing user mentions'
+ end
end
RSpec.shared_examples 'mentions in notes' do |mentionable_type|
- context 'when mentionable notes contain mentions' do
+ shared_examples 'when mentionable notes contain mentions' do
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:group) { create(:group) }
@@ -261,6 +277,22 @@ RSpec.shared_examples 'mentions in notes' do |mentionable_type|
expect(mentionable.referenced_groups(user)).to eq [group]
end
end
+
+ context 'when store_mentions_without_subtransaction is enabled' do
+ before do
+ stub_feature_flags(store_mentions_without_subtransaction: true)
+ end
+
+ it_behaves_like 'when mentionable notes contain mentions'
+ end
+
+ context 'when store_mentions_without_subtransaction is disabled' do
+ before do
+ stub_feature_flags(store_mentions_without_subtransaction: false)
+ end
+
+ it_behaves_like 'when mentionable notes contain mentions'
+ end
end
RSpec.shared_examples 'load mentions from DB' do |mentionable_type|
@@ -278,7 +310,7 @@ RSpec.shared_examples 'load mentions from DB' do |mentionable_type|
context 'when stored user mention contains ids of inexistent records' do
before do
- user_mention = note.send(:model_user_mention)
+ user_mention = note.user_mentions.first
mention_ids = {
mentioned_users_ids: user_mention.mentioned_users_ids.to_a << non_existing_record_id,
mentioned_projects_ids: user_mention.mentioned_projects_ids.to_a << non_existing_record_id,
@@ -302,7 +334,7 @@ RSpec.shared_examples 'load mentions from DB' do |mentionable_type|
let(:group_member) { create(:group_member, user: create(:user), group: private_group) }
before do
- user_mention = note.send(:model_user_mention)
+ user_mention = note.user_mentions.first
mention_ids = {
mentioned_projects_ids: user_mention.mentioned_projects_ids.to_a << private_project.id,
mentioned_groups_ids: user_mention.mentioned_groups_ids.to_a << private_group.id
diff --git a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
index 5459d17b1df..274fbae3dfd 100644
--- a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
+++ b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
@@ -128,10 +128,6 @@ RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
it { is_expected.not_to allow_value(12.hours.to_i).for(:valid_time_duration_seconds) }
end
- describe '#signing_keys' do
- it { is_expected.to validate_absence_of(:signing_keys) }
- end
-
describe '#file' do
it { is_expected.not_to validate_presence_of(:file) }
end
@@ -141,7 +137,15 @@ RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
end
describe '#file_signature' do
- it { is_expected.to validate_absence_of(:file_signature) }
+ it { is_expected.not_to validate_absence_of(:file_signature) }
+ end
+
+ describe '#signed_file' do
+ it { is_expected.not_to validate_presence_of(:signed_file) }
+ end
+
+ describe '#signed_file_store' do
+ it { is_expected.to validate_presence_of(:signed_file_store) }
end
end
diff --git a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
index 7b591ad84d1..2e01de2ea84 100644
--- a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
+++ b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
@@ -22,116 +22,6 @@ RSpec.shared_examples 'UpdateProjectStatistics' do |with_counter_attribute|
it { is_expected.to be_new_record }
- context 'when feature flag efficient_counter_attribute is disabled' do
- before do
- stub_feature_flags(efficient_counter_attribute: false)
- end
-
- context 'when creating' do
- it 'updates the project statistics' do
- delta0 = reload_stat
-
- subject.save!
-
- delta1 = reload_stat
-
- expect(delta1).to eq(delta0 + read_attribute)
- expect(delta1).to be > delta0
- end
-
- it 'schedules a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .to receive(:perform_async).once
-
- subject.save!
- end
- end
-
- context 'when updating' do
- let(:delta) { 42 }
-
- before do
- subject.save!
- end
-
- it 'updates project statistics' do
- expect(ProjectStatistics)
- .to receive(:increment_statistic)
- .and_call_original
-
- subject.write_attribute(statistic_attribute, read_attribute + delta)
-
- expect { subject.save! }
- .to change { reload_stat }
- .by(delta)
- end
-
- it 'schedules a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .to receive(:perform_async).once
-
- subject.write_attribute(statistic_attribute, read_attribute + delta)
- subject.save!
- end
-
- it 'avoids N + 1 queries' do
- subject.write_attribute(statistic_attribute, read_attribute + delta)
-
- control_count = ActiveRecord::QueryRecorder.new do
- subject.save!
- end
-
- subject.write_attribute(statistic_attribute, read_attribute + delta)
-
- expect do
- subject.save!
- end.not_to exceed_query_limit(control_count)
- end
- end
-
- context 'when destroying' do
- before do
- subject.save!
- end
-
- it 'updates the project statistics' do
- delta0 = reload_stat
-
- subject.destroy!
-
- delta1 = reload_stat
-
- expect(delta1).to eq(delta0 - read_attribute)
- expect(delta1).to be < delta0
- end
-
- it 'schedules a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .to receive(:perform_async).once
-
- subject.destroy!
- end
-
- context 'when it is destroyed from the project level' do
- it 'does not update the project statistics' do
- expect(ProjectStatistics)
- .not_to receive(:increment_statistic)
-
- project.update!(pending_delete: true)
- project.destroy!
- end
-
- it 'does not schedule a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .not_to receive(:perform_async)
-
- project.update!(pending_delete: true)
- project.destroy!
- end
- end
- end
- end
-
def expect_flush_counter_increments_worker_performed
expect(FlushCounterIncrementsWorker)
.to receive(:perform_in)
diff --git a/spec/support/shared_examples/namespaces/linear_traversal_examples.rb b/spec/support/shared_examples/namespaces/linear_traversal_examples.rb
deleted file mode 100644
index 2fd90c36953..00000000000
--- a/spec/support/shared_examples/namespaces/linear_traversal_examples.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-# Traversal examples common to linear and recursive methods are in
-# spec/support/shared_examples/namespaces/traversal_examples.rb
-
-RSpec.shared_examples 'linear namespace traversal' do
- context 'when use_traversal_ids feature flag is enabled' do
- before do
- stub_feature_flags(use_traversal_ids: true)
- end
-
- context 'scopes' do
- describe '.as_ids' do
- let_it_be(:namespace1) { create(:group) }
- let_it_be(:namespace2) { create(:group) }
-
- subject { Namespace.where(id: [namespace1, namespace2]).as_ids.pluck(:id) }
-
- it { is_expected.to contain_exactly(namespace1.id, namespace2.id) }
- end
- end
- end
-end
diff --git a/spec/support/shared_examples/namespaces/traversal_examples.rb b/spec/support/shared_examples/namespaces/traversal_examples.rb
index f09634556c3..d126b242fb0 100644
--- a/spec/support/shared_examples/namespaces/traversal_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_examples.rb
@@ -55,12 +55,34 @@ RSpec.shared_examples 'namespace traversal' do
end
describe '#ancestors' do
- it 'returns the correct ancestors' do
+ before do
# #reload is called to make sure traversal_ids are reloaded
- expect(very_deep_nested_group.reload.ancestors).to contain_exactly(group, nested_group, deep_nested_group)
- expect(deep_nested_group.reload.ancestors).to contain_exactly(group, nested_group)
- expect(nested_group.reload.ancestors).to contain_exactly(group)
- expect(group.reload.ancestors).to eq([])
+ reload_models(group, nested_group, deep_nested_group, very_deep_nested_group)
+ end
+
+ it 'returns the correct ancestors' do
+ expect(very_deep_nested_group.ancestors).to contain_exactly(group, nested_group, deep_nested_group)
+ expect(deep_nested_group.ancestors).to contain_exactly(group, nested_group)
+ expect(nested_group.ancestors).to contain_exactly(group)
+ expect(group.ancestors).to eq([])
+ end
+
+ context 'with asc hierarchy_order' do
+ it 'returns the correct ancestors' do
+ expect(very_deep_nested_group.ancestors(hierarchy_order: :asc)).to eq [deep_nested_group, nested_group, group]
+ expect(deep_nested_group.ancestors(hierarchy_order: :asc)).to eq [nested_group, group]
+ expect(nested_group.ancestors(hierarchy_order: :asc)).to eq [group]
+ expect(group.ancestors(hierarchy_order: :asc)).to eq([])
+ end
+ end
+
+ context 'with desc hierarchy_order' do
+ it 'returns the correct ancestors' do
+ expect(very_deep_nested_group.ancestors(hierarchy_order: :desc)).to eq [group, nested_group, deep_nested_group]
+ expect(deep_nested_group.ancestors(hierarchy_order: :desc)).to eq [group, nested_group]
+ expect(nested_group.ancestors(hierarchy_order: :desc)).to eq [group]
+ expect(group.ancestors(hierarchy_order: :desc)).to eq([])
+ end
end
describe '#recursive_ancestors' do
@@ -78,6 +100,24 @@ RSpec.shared_examples 'namespace traversal' do
expect(group.ancestor_ids).to be_empty
end
+ context 'with asc hierarchy_order' do
+ it 'returns the correct ancestor ids' do
+ expect(very_deep_nested_group.ancestor_ids(hierarchy_order: :asc)).to eq [deep_nested_group.id, nested_group.id, group.id]
+ expect(deep_nested_group.ancestor_ids(hierarchy_order: :asc)).to eq [nested_group.id, group.id]
+ expect(nested_group.ancestor_ids(hierarchy_order: :asc)).to eq [group.id]
+ expect(group.ancestor_ids(hierarchy_order: :asc)).to eq([])
+ end
+ end
+
+ context 'with desc hierarchy_order' do
+ it 'returns the correct ancestor ids' do
+ expect(very_deep_nested_group.ancestor_ids(hierarchy_order: :desc)).to eq [group.id, nested_group.id, deep_nested_group.id]
+ expect(deep_nested_group.ancestor_ids(hierarchy_order: :desc)).to eq [group.id, nested_group.id]
+ expect(nested_group.ancestor_ids(hierarchy_order: :desc)).to eq [group.id]
+ expect(group.ancestor_ids(hierarchy_order: :desc)).to eq([])
+ end
+ end
+
describe '#recursive_ancestor_ids' do
let_it_be(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
@@ -93,6 +133,24 @@ RSpec.shared_examples 'namespace traversal' do
expect(group.self_and_ancestors).to contain_exactly(group)
end
+ context 'with asc hierarchy_order' do
+ it 'returns the correct ancestors' do
+ expect(very_deep_nested_group.self_and_ancestors(hierarchy_order: :asc)).to eq [very_deep_nested_group, deep_nested_group, nested_group, group]
+ expect(deep_nested_group.self_and_ancestors(hierarchy_order: :asc)).to eq [deep_nested_group, nested_group, group]
+ expect(nested_group.self_and_ancestors(hierarchy_order: :asc)).to eq [nested_group, group]
+ expect(group.self_and_ancestors(hierarchy_order: :asc)).to eq([group])
+ end
+ end
+
+ context 'with desc hierarchy_order' do
+ it 'returns the correct ancestors' do
+ expect(very_deep_nested_group.self_and_ancestors(hierarchy_order: :desc)).to eq [group, nested_group, deep_nested_group, very_deep_nested_group]
+ expect(deep_nested_group.self_and_ancestors(hierarchy_order: :desc)).to eq [group, nested_group, deep_nested_group]
+ expect(nested_group.self_and_ancestors(hierarchy_order: :desc)).to eq [group, nested_group]
+ expect(group.self_and_ancestors(hierarchy_order: :desc)).to eq([group])
+ end
+ end
+
describe '#recursive_self_and_ancestors' do
let_it_be(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
@@ -108,6 +166,24 @@ RSpec.shared_examples 'namespace traversal' do
expect(group.self_and_ancestor_ids).to contain_exactly(group.id)
end
+ context 'with asc hierarchy_order' do
+ it 'returns the correct ancestor ids' do
+ expect(very_deep_nested_group.self_and_ancestor_ids(hierarchy_order: :asc)).to eq [very_deep_nested_group.id, deep_nested_group.id, nested_group.id, group.id]
+ expect(deep_nested_group.self_and_ancestor_ids(hierarchy_order: :asc)).to eq [deep_nested_group.id, nested_group.id, group.id]
+ expect(nested_group.self_and_ancestor_ids(hierarchy_order: :asc)).to eq [nested_group.id, group.id]
+ expect(group.self_and_ancestor_ids(hierarchy_order: :asc)).to eq([group.id])
+ end
+ end
+
+ context 'with desc hierarchy_order' do
+ it 'returns the correct ancestor ids' do
+ expect(very_deep_nested_group.self_and_ancestor_ids(hierarchy_order: :desc)).to eq [group.id, nested_group.id, deep_nested_group.id, very_deep_nested_group.id]
+ expect(deep_nested_group.self_and_ancestor_ids(hierarchy_order: :desc)).to eq [group.id, nested_group.id, deep_nested_group.id]
+ expect(nested_group.self_and_ancestor_ids(hierarchy_order: :desc)).to eq [group.id, nested_group.id]
+ expect(group.self_and_ancestor_ids(hierarchy_order: :desc)).to eq([group.id])
+ end
+ end
+
describe '#recursive_self_and_ancestor_ids' do
let_it_be(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
diff --git a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
new file mode 100644
index 00000000000..4d328c03641
--- /dev/null
+++ b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'namespace traversal scopes' do
+ # Hierarchy 1
+ let_it_be(:group_1) { create(:group) }
+ let_it_be(:nested_group_1) { create(:group, parent: group_1) }
+ let_it_be(:deep_nested_group_1) { create(:group, parent: nested_group_1) }
+
+ # Hierarchy 2
+ let_it_be(:group_2) { create(:group) }
+ let_it_be(:nested_group_2) { create(:group, parent: group_2) }
+ let_it_be(:deep_nested_group_2) { create(:group, parent: nested_group_2) }
+
+ # All groups
+ let_it_be(:groups) do
+ [
+ group_1, nested_group_1, deep_nested_group_1,
+ group_2, nested_group_2, deep_nested_group_2
+ ]
+ end
+
+ describe '.as_ids' do
+ subject { described_class.where(id: [group_1, group_2]).as_ids.pluck(:id) }
+
+ it { is_expected.to contain_exactly(group_1.id, group_2.id) }
+ end
+
+ describe '.without_sti_condition' do
+ subject { described_class.without_sti_condition }
+
+ it { expect(subject.where_values_hash).not_to have_key(:type) }
+ end
+
+ describe '.self_and_descendants' do
+ subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_descendants }
+
+ it { is_expected.to contain_exactly(nested_group_1, deep_nested_group_1, nested_group_2, deep_nested_group_2) }
+
+ context 'with duplicate descendants' do
+ subject { described_class.where(id: [group_1, group_2, nested_group_1]).self_and_descendants }
+
+ it { is_expected.to match_array(groups) }
+ end
+
+ context 'when include_self is false' do
+ subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_descendants(include_self: false) }
+
+ it { is_expected.to contain_exactly(deep_nested_group_1, deep_nested_group_2) }
+ end
+ end
+
+ describe '.self_and_descendant_ids' do
+ subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_descendant_ids.pluck(:id) }
+
+ it { is_expected.to contain_exactly(nested_group_1.id, deep_nested_group_1.id, nested_group_2.id, deep_nested_group_2.id) }
+
+ context 'when include_self is false' do
+ subject do
+ described_class
+ .where(id: [nested_group_1, nested_group_2])
+ .self_and_descendant_ids(include_self: false)
+ .pluck(:id)
+ end
+
+ it { is_expected.to contain_exactly(deep_nested_group_1.id, deep_nested_group_2.id) }
+ end
+ end
+end
diff --git a/spec/support/shared_examples/policies/project_policy_shared_examples.rb b/spec/support/shared_examples/policies/project_policy_shared_examples.rb
index 013c9b61b99..a4243db6bc9 100644
--- a/spec/support/shared_examples/policies/project_policy_shared_examples.rb
+++ b/spec/support/shared_examples/policies/project_policy_shared_examples.rb
@@ -330,3 +330,18 @@ RSpec.shared_examples 'project policies as admin without admin mode' do
end
end
end
+
+RSpec.shared_examples 'package access with repository disabled' do
+ context 'when repository is disabled' do
+ before do
+ project.project_feature.update!(
+ # Disable merge_requests and builds as well, since merge_requests and
+ # builds cannot have higher visibility than repository.
+ merge_requests_access_level: ProjectFeature::DISABLED,
+ builds_access_level: ProjectFeature::DISABLED,
+ repository_access_level: ProjectFeature::DISABLED)
+ end
+
+ it { is_expected.to be_allowed(:read_package) }
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
index 1f68dd7a382..a3ed74085fb 100644
--- a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
@@ -4,7 +4,7 @@ RSpec.shared_context 'Debian repository shared context' do |container_type, can_
include_context 'workhorse headers'
before do
- stub_feature_flags(debian_packages: true)
+ stub_feature_flags(debian_packages: true, debian_group_packages: true)
end
let_it_be(:private_container, freeze: can_freeze) { create(container_type, :private) }
@@ -29,6 +29,8 @@ RSpec.shared_context 'Debian repository shared context' do |container_type, can_
let_it_be(:public_project) { create(:project, :public, group: public_container) }
let_it_be(:private_project_distribution) { create(:debian_project_distribution, container: private_project, codename: 'existing-codename') }
let_it_be(:public_project_distribution) { create(:debian_project_distribution, container: public_project, codename: 'existing-codename') }
+
+ let(:project) { { private: private_project, public: public_project }[visibility_level] }
else
let_it_be(:private_project) { private_container }
let_it_be(:public_project) { public_container }
@@ -45,12 +47,8 @@ RSpec.shared_context 'Debian repository shared context' do |container_type, can_
let(:architecture) { { private: private_architecture, public: public_architecture }[visibility_level] }
let(:component) { { private: private_component, public: public_component }[visibility_level] }
let(:component_file) { { private: private_component_file, public: public_component_file }[visibility_level] }
-
- let(:source_package) { 'sample' }
- let(:letter) { source_package[0..2] == 'lib' ? source_package[0..3] : source_package[0] }
- let(:package_name) { 'libsample0' }
- let(:package_version) { '1.2.3~alpha2' }
- let(:file_name) { "#{package_name}_#{package_version}_#{architecture.name}.deb" }
+ let(:package) { { private: private_package, public: public_package }[visibility_level] }
+ let(:letter) { package.name[0..2] == 'lib' ? package.name[0..3] : package.name[0] }
let(:method) { :get }
@@ -94,6 +92,10 @@ RSpec.shared_context 'Debian repository shared context' do |container_type, can_
end
end
+RSpec.shared_context 'with file_name' do |file_name|
+ let(:file_name) { file_name }
+end
+
RSpec.shared_context 'Debian repository auth headers' do |user_role, user_token, auth_method = :token|
let(:token) { user_token ? personal_access_token.token : 'wrong' }
diff --git a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
index c15c59e1a1d..0390e60747f 100644
--- a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
@@ -46,6 +46,8 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
end
shared_examples 'handling all conditions' do
+ include_context 'dependency proxy helpers context'
+
where(:auth, :package_name_type, :request_forward, :visibility, :user_role, :expected_result, :expected_status) do
nil | :scoped_naming_convention | true | :public | nil | :accept | :ok
nil | :scoped_naming_convention | false | :public | nil | :accept | :ok
@@ -243,7 +245,7 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
project.send("add_#{user_role}", user) if user_role
project.update!(visibility: visibility.to_s)
package.update!(name: package_name) unless package_name == 'non-existing-package'
- stub_application_setting(npm_package_requests_forwarding: request_forward)
+ allow_fetch_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
end
example_name = "#{params[:expected_result]} metadata request"
diff --git a/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb
index e6b3dc74b74..86b6975bf9f 100644
--- a/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb
@@ -10,9 +10,10 @@ end
RSpec.shared_examples 'accept package tags request' do |status:|
using RSpec::Parameterized::TableSyntax
+ include_context 'dependency proxy helpers context'
before do
- stub_application_setting(npm_package_requests_forwarding: false)
+ allow_fetch_application_setting(attribute: "npm_package_requests_forwarding", return_value: false)
end
context 'with valid package name' do
diff --git a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
index 8a351226123..ed6d9ed43c8 100644
--- a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
@@ -228,6 +228,35 @@ RSpec.shared_examples 'pypi simple API endpoint' do
it_behaves_like 'PyPI package versions', :developer, :success
end
+
+ context 'package request forward' do
+ include_context 'dependency proxy helpers context'
+
+ where(:forward, :package_in_project, :shared_examples_name, :expected_status) do
+ true | true | 'PyPI package versions' | :success
+ true | false | 'process PyPI api request' | :redirect
+ false | true | 'PyPI package versions' | :success
+ false | false | 'process PyPI api request' | :not_found
+ end
+
+ with_them do
+ let_it_be(:package) { create(:pypi_package, project: project, name: 'foobar') }
+
+ let(:package_name) do
+ if package_in_project
+ 'foobar'
+ else
+ 'barfoo'
+ end
+ end
+
+ before do
+ allow_fetch_application_setting(attribute: "pypi_package_requests_forwarding", return_value: forward)
+ end
+
+ it_behaves_like params[:shared_examples_name], :reporter, params[:expected_status]
+ end
+ end
end
RSpec.shared_examples 'pypi file download endpoint' do
diff --git a/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
index afc902dd184..104e91add8b 100644
--- a/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
@@ -128,17 +128,25 @@ RSpec.shared_examples 'time tracking endpoints' do |issuable_name|
if issuable_name == 'merge_request'
it 'calls update service with :use_specialized_service param' do
- expect(::MergeRequests::UpdateService).to receive(:new).with(project: project, current_user: user, params: hash_including(use_specialized_service: true))
-
- post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user), params: { duration: '2h' }
+ expect(::MergeRequests::UpdateService).to receive(:new).with(
+ project: project,
+ current_user: user,
+ params: hash_including(
+ use_specialized_service: true,
+ spend_time: hash_including(duration: 7200, summary: 'summary')))
+
+ post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user), params: { duration: '2h', summary: 'summary' }
end
end
if issuable_name == 'issue'
it 'calls update service without :use_specialized_service param' do
- expect(::Issues::UpdateService).to receive(:new).with(project: project, current_user: user, params: hash_not_including(use_specialized_service: true))
+ expect(::Issues::UpdateService).to receive(:new).with(
+ project: project,
+ current_user: user,
+ params: { spend_time: { duration: 3600, summary: 'summary', user_id: user.id } })
- post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user), params: { duration: '2h' }
+ post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user), params: { duration: '1h', summary: 'summary' }
end
end
end
diff --git a/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb b/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb
index 7608f1c7f8a..32adf98969c 100644
--- a/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb
+++ b/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb
@@ -63,3 +63,19 @@ end
RSpec.shared_examples 'diff file discussion entity' do
it_behaves_like 'diff file base entity'
end
+
+RSpec.shared_examples 'diff file with conflict_type' do
+ describe '#conflict_type' do
+ it 'returns nil by default' do
+ expect(subject[:conflict_type]).to be_nil
+ end
+
+ context 'when there is matching conflict file' do
+ let(:options) { { conflicts: { diff_file.new_path => double(diff_lines_for_serializer: [], conflict_type: :both_modified) } } }
+
+ it 'returns false' do
+ expect(subject[:conflict_type]).to eq(:both_modified)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb b/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
index 7d4fbeea0dc..d9b837258ce 100644
--- a/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
@@ -100,8 +100,8 @@ RSpec.shared_examples 'issues move service' do |group|
create(:labeled_issue, project: project, labels: [bug, development], assignees: [assignee])
end
- it 'returns false' do
- expect(described_class.new(parent, user, params).execute(issue)).to eq false
+ it 'returns nil' do
+ expect(described_class.new(parent, user, params).execute(issue)).to be_nil
end
it 'keeps issues labels' do
diff --git a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
index eafcbd77040..f6e25ee6647 100644
--- a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
@@ -69,6 +69,10 @@ RSpec.shared_examples 'a browsable' do
end
RSpec.shared_examples 'an accessible' do
+ before do
+ stub_feature_flags(container_registry_migration_phase1: false)
+ end
+
let(:access) do
[{ 'type' => 'repository',
'name' => project.full_path,
@@ -203,9 +207,7 @@ RSpec.shared_examples 'a container registry auth service' do
end
end
- context 'for private project' do
- let_it_be(:project) { create(:project) }
-
+ shared_examples 'private project' do
context 'allow to use scope-less authentication' do
it_behaves_like 'a valid token'
end
@@ -345,8 +347,20 @@ RSpec.shared_examples 'a container registry auth service' do
end
end
- context 'for public project' do
- let_it_be(:project) { create(:project, :public) }
+ context 'for private project' do
+ let_it_be_with_reload(:project) { create(:project) }
+
+ it_behaves_like 'private project'
+ end
+
+ context 'for public project with private container registry' do
+ let_it_be_with_reload(:project) { create(:project, :public, :container_registry_private) }
+
+ it_behaves_like 'private project'
+ end
+
+ context 'for public project with container_registry `enabled`' do
+ let_it_be(:project) { create(:project, :public, :container_registry_enabled) }
context 'allow anyone to pull images' do
let(:current_params) do
@@ -394,8 +408,8 @@ RSpec.shared_examples 'a container registry auth service' do
end
end
- context 'for internal project' do
- let_it_be(:project) { create(:project, :internal) }
+ context 'for internal project with container_registry `enabled`' do
+ let_it_be(:project) { create(:project, :internal, :container_registry_enabled) }
context 'for internal user' do
context 'allow anyone to pull images' do
@@ -470,6 +484,12 @@ RSpec.shared_examples 'a container registry auth service' do
end
end
end
+
+ context 'for internal project with private container registry' do
+ let_it_be_with_reload(:project) { create(:project, :internal, :container_registry_private) }
+
+ it_behaves_like 'private project'
+ end
end
context 'delete authorized as maintainer' do
@@ -630,12 +650,8 @@ RSpec.shared_examples 'a container registry auth service' do
end
end
- context 'for project with private container registry' do
- let_it_be(:project, reload: true) { create(:project, :public) }
-
- before do
- project.project_feature.update!(container_registry_access_level: ProjectFeature::PRIVATE)
- end
+ context 'for public project with private container registry' do
+ let_it_be_with_reload(:project) { create(:project, :public, :container_registry_private) }
it_behaves_like 'pullable for being team member'
@@ -675,11 +691,7 @@ RSpec.shared_examples 'a container registry auth service' do
end
context 'for project without container registry' do
- let_it_be(:project) { create(:project, :public, container_registry_enabled: false) }
-
- before do
- project.update!(container_registry_enabled: false)
- end
+ let_it_be_with_reload(:project) { create(:project, :public, :container_registry_disabled) }
context 'disallow when pulling' do
let(:current_params) do
@@ -719,12 +731,16 @@ RSpec.shared_examples 'a container registry auth service' do
context 'support for multiple scopes' do
let_it_be(:internal_project) { create(:project, :internal) }
let_it_be(:private_project) { create(:project, :private) }
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:public_project_private_container_registry) { create(:project, :public, :container_registry_private) }
let(:current_params) do
{
scopes: [
"repository:#{internal_project.full_path}:pull",
- "repository:#{private_project.full_path}:pull"
+ "repository:#{private_project.full_path}:pull",
+ "repository:#{public_project.full_path}:pull",
+ "repository:#{public_project_private_container_registry.full_path}:pull"
]
}
end
@@ -744,13 +760,19 @@ RSpec.shared_examples 'a container registry auth service' do
'actions' => ['pull'] },
{ 'type' => 'repository',
'name' => private_project.full_path,
+ 'actions' => ['pull'] },
+ { 'type' => 'repository',
+ 'name' => public_project.full_path,
+ 'actions' => ['pull'] },
+ { 'type' => 'repository',
+ 'name' => public_project_private_container_registry.full_path,
'actions' => ['pull'] }
]
end
end
end
- context 'user only has access to internal project' do
+ context 'user only has access to internal and public projects' do
let_it_be(:current_user) { create(:user) }
it_behaves_like 'a browsable' do
@@ -758,16 +780,35 @@ RSpec.shared_examples 'a container registry auth service' do
[
{ 'type' => 'repository',
'name' => internal_project.full_path,
+ 'actions' => ['pull'] },
+ { 'type' => 'repository',
+ 'name' => public_project.full_path,
'actions' => ['pull'] }
]
end
end
end
- context 'anonymous access is rejected' do
+ context 'anonymous user has access only to public project' do
let(:current_user) { nil }
- it_behaves_like 'a forbidden'
+ it_behaves_like 'a browsable' do
+ let(:access) do
+ [
+ { 'type' => 'repository',
+ 'name' => public_project.full_path,
+ 'actions' => ['pull'] }
+ ]
+ end
+ end
+
+ context 'with no public container registry' do
+ before do
+ public_project.project_feature.update_column(:container_registry_access_level, ProjectFeature::PRIVATE)
+ end
+
+ it_behaves_like 'a forbidden'
+ end
end
end
@@ -796,8 +837,8 @@ RSpec.shared_examples 'a container registry auth service' do
it_behaves_like 'a forbidden'
end
- context 'for public project' do
- let_it_be(:project) { create(:project, :public) }
+ context 'for public project with container registry `enabled`' do
+ let_it_be_with_reload(:project) { create(:project, :public, :container_registry_enabled) }
context 'when pulling and pushing' do
let(:current_params) do
@@ -818,6 +859,19 @@ RSpec.shared_examples 'a container registry auth service' do
end
end
+ context 'for public project with container registry `private`' do
+ let_it_be_with_reload(:project) { create(:project, :public, :container_registry_private) }
+
+ context 'when pulling and pushing' do
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:pull,push"] }
+ end
+
+ it_behaves_like 'a forbidden'
+ it_behaves_like 'not a container repository factory'
+ end
+ end
+
context 'for registry catalog' do
let(:current_params) do
{ scopes: ["registry:catalog:*"] }
@@ -830,15 +884,15 @@ RSpec.shared_examples 'a container registry auth service' do
context 'for deploy tokens' do
let(:current_params) do
- { scopes: ["repository:#{project.full_path}:pull"] }
+ { scopes: ["repository:#{project.full_path}:pull"], deploy_token: deploy_token }
end
context 'when deploy token has read and write registry as scopes' do
- let(:current_user) { create(:deploy_token, write_registry: true, projects: [project]) }
+ let(:deploy_token) { create(:deploy_token, write_registry: true, projects: [project]) }
shared_examples 'able to login' do
context 'registry provides read_container_image authentication_abilities' do
- let(:current_params) { {} }
+ let(:current_params) { { deploy_token: deploy_token } }
let(:authentication_abilities) { [:read_container_image] }
it_behaves_like 'an authenticated'
@@ -854,7 +908,7 @@ RSpec.shared_examples 'a container registry auth service' do
context 'when pushing' do
let(:current_params) do
- { scopes: ["repository:#{project.full_path}:push"] }
+ { scopes: ["repository:#{project.full_path}:push"], deploy_token: deploy_token }
end
it_behaves_like 'a pushable'
@@ -872,7 +926,7 @@ RSpec.shared_examples 'a container registry auth service' do
context 'when pushing' do
let(:current_params) do
- { scopes: ["repository:#{project.full_path}:push"] }
+ { scopes: ["repository:#{project.full_path}:push"], deploy_token: deploy_token }
end
it_behaves_like 'a pushable'
@@ -890,7 +944,25 @@ RSpec.shared_examples 'a container registry auth service' do
context 'when pushing' do
let(:current_params) do
- { scopes: ["repository:#{project.full_path}:push"] }
+ { scopes: ["repository:#{project.full_path}:push"], deploy_token: deploy_token }
+ end
+
+ it_behaves_like 'a pushable'
+ end
+
+ it_behaves_like 'able to login'
+ end
+
+ context 'for public project with private container registry' do
+ let_it_be_with_reload(:project) { create(:project, :public, :container_registry_private) }
+
+ context 'when pulling' do
+ it_behaves_like 'a pullable'
+ end
+
+ context 'when pushing' do
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:push"], deploy_token: deploy_token }
end
it_behaves_like 'a pushable'
@@ -901,26 +973,26 @@ RSpec.shared_examples 'a container registry auth service' do
end
context 'when deploy token does not have read_registry scope' do
- let(:current_user) { create(:deploy_token, projects: [project], read_registry: false) }
+ let(:deploy_token) do
+ create(:deploy_token, projects: [project], read_registry: false)
+ end
shared_examples 'unable to login' do
context 'registry provides no container authentication_abilities' do
- let(:current_params) { {} }
let(:authentication_abilities) { [] }
it_behaves_like 'a forbidden'
end
context 'registry provides inapplicable container authentication_abilities' do
- let(:current_params) { {} }
let(:authentication_abilities) { [:download_code] }
it_behaves_like 'a forbidden'
end
end
- context 'for public project' do
- let_it_be(:project) { create(:project, :public) }
+ context 'for public project with container registry `enabled`' do
+ let_it_be_with_reload(:project) { create(:project, :public, :container_registry_enabled) }
context 'when pulling' do
it_behaves_like 'a pullable'
@@ -929,6 +1001,16 @@ RSpec.shared_examples 'a container registry auth service' do
it_behaves_like 'unable to login'
end
+ context 'for public project with container registry `private`' do
+ let_it_be_with_reload(:project) { create(:project, :public, :container_registry_private) }
+
+ context 'when pulling' do
+ it_behaves_like 'an inaccessible'
+ end
+
+ it_behaves_like 'unable to login'
+ end
+
context 'for internal project' do
let_it_be(:project) { create(:project, :internal) }
@@ -958,16 +1040,24 @@ RSpec.shared_examples 'a container registry auth service' do
end
context 'when deploy token is not related to the project' do
- let_it_be(:current_user) { create(:deploy_token, read_registry: false) }
+ let_it_be(:deploy_token) { create(:deploy_token, read_registry: false) }
- context 'for public project' do
- let_it_be(:project) { create(:project, :public) }
+ context 'for public project with container registry `enabled`' do
+ let_it_be_with_reload(:project) { create(:project, :public, :container_registry_enabled) }
context 'when pulling' do
it_behaves_like 'a pullable'
end
end
+ context 'for public project with container registry `private`' do
+ let_it_be_with_reload(:project) { create(:project, :public, :container_registry_private) }
+
+ context 'when pulling' do
+ it_behaves_like 'an inaccessible'
+ end
+ end
+
context 'for internal project' do
let_it_be(:project) { create(:project, :internal) }
@@ -986,14 +1076,20 @@ RSpec.shared_examples 'a container registry auth service' do
end
context 'when deploy token has been revoked' do
- let(:current_user) { create(:deploy_token, :revoked, projects: [project]) }
+ let(:deploy_token) { create(:deploy_token, :revoked, projects: [project]) }
- context 'for public project' do
- let_it_be(:project) { create(:project, :public) }
+ context 'for public project with container registry `enabled`' do
+ let_it_be(:project) { create(:project, :public, :container_registry_enabled) }
it_behaves_like 'a pullable'
end
+ context 'for public project with container registry `private`' do
+ let_it_be(:project) { create(:project, :public, :container_registry_private) }
+
+ it_behaves_like 'an inaccessible'
+ end
+
context 'for internal project' do
let_it_be(:project) { create(:project, :internal) }
diff --git a/spec/support/shared_examples/services/jira/requests/base_shared_examples.rb b/spec/support/shared_examples/services/jira/requests/base_shared_examples.rb
new file mode 100644
index 00000000000..56a6d24d557
--- /dev/null
+++ b/spec/support/shared_examples/services/jira/requests/base_shared_examples.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a service that handles Jira API errors' do
+ include AfterNextHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ where(:exception_class, :exception_message, :expected_message) do
+ Errno::ECONNRESET | '' | 'A connection error occurred'
+ Errno::ECONNREFUSED | '' | 'A connection error occurred'
+ Errno::ETIMEDOUT | '' | 'A timeout error occurred'
+ Timeout::Error | '' | 'A timeout error occurred'
+ URI::InvalidURIError | '' | 'The Jira API URL'
+ SocketError | '' | 'The Jira API URL'
+ OpenSSL::SSL::SSLError | 'foo' | 'An SSL error occurred while connecting to Jira: foo'
+ JIRA::HTTPError | 'Unauthorized' | 'The credentials for accessing Jira are not valid'
+ JIRA::HTTPError | 'Forbidden' | 'The credentials for accessing Jira are not allowed'
+ JIRA::HTTPError | 'Bad Request' | 'An error occurred while requesting data from Jira'
+ JIRA::HTTPError | 'Foo' | 'An error occurred while requesting data from Jira.'
+ JIRA::HTTPError | '{"errorMessages":["foo","bar"]}' | 'An error occurred while requesting data from Jira: foo and bar'
+ JIRA::HTTPError | '{"errorMessages":[""]}' | 'An error occurred while requesting data from Jira.'
+ end
+
+ with_them do
+ it 'handles the error' do
+ stub_client_and_raise(exception_class, exception_message)
+
+ expect(subject).to be_a(ServiceResponse)
+ expect(subject).to be_error
+ expect(subject.message).to include(expected_message)
+ end
+ end
+
+ context 'when the JSON in JIRA::HTTPError is unsafe' do
+ before do
+ stub_client_and_raise(JIRA::HTTPError, error)
+ end
+
+ context 'when JSON is malformed' do
+ let(:error) { '{"errorMessages":' }
+
+ it 'returns the default error message' do
+ expect(subject.message).to eq('An error occurred while requesting data from Jira. Check your Jira integration configuration and try again.')
+ end
+ end
+
+ context 'when JSON contains tags' do
+ let(:error) { '{"errorMessages":["<script>alert(true)</script>foo"]}' }
+
+ it 'sanitizes it' do
+ expect(subject.message).to eq('An error occurred while requesting data from Jira: foo. Check your Jira integration configuration and try again.')
+ end
+ end
+ end
+
+ it 'allows unknown exception classes to bubble' do
+ stub_client_and_raise(StandardError)
+
+ expect { subject }.to raise_exception(StandardError)
+ end
+
+ it 'logs the error' do
+ stub_client_and_raise(Timeout::Error, 'foo')
+
+ expect(Gitlab::ProjectServiceLogger).to receive(:error).with(
+ hash_including(
+ client_url: be_present,
+ message: 'Error sending message',
+ service_class: described_class.name,
+ error: hash_including(
+ exception_class: Timeout::Error.name,
+ exception_message: 'foo',
+ exception_backtrace: be_present
+ )
+ )
+ )
+ expect(subject).to be_error
+ end
+
+ def stub_client_and_raise(exception_class, message = '')
+ # `JIRA::HTTPError` classes take a response from the JIRA API, rather than a `String`.
+ message = double(body: message) if exception_class == JIRA::HTTPError
+
+ allow_next(JIRA::Client).to receive(:get).and_raise(exception_class, message)
+ end
+end
diff --git a/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb b/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb
index 9ffeba1b1d0..c979fdc2bb0 100644
--- a/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb
+++ b/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb
@@ -1,165 +1,259 @@
# frozen_string_literal: true
RSpec.shared_examples 'Generate Debian Distribution and component files' do
- let_it_be(:component_main) { create("debian_#{container_type}_component", distribution: distribution, name: 'main') }
- let_it_be(:component_contrib) { create("debian_#{container_type}_component", distribution: distribution, name: 'contrib') }
-
- let_it_be(:architecture_all) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'all') }
- let_it_be(:architecture_amd64) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'amd64') }
- let_it_be(:architecture_arm64) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'arm64') }
-
- let_it_be(:component_file1) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_all, updated_at: '2020-01-24T08:00:00Z', file_sha256: 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', file_md5: 'd41d8cd98f00b204e9800998ecf8427e', file_fixture: nil, size: 0) } # updated
- let_it_be(:component_file2) { create("debian_#{container_type}_component_file", component: component_main, architecture: architecture_all, updated_at: '2020-01-24T09:00:00Z', file_sha256: 'a') } # destroyed
- let_it_be(:component_file3) { create("debian_#{container_type}_component_file", component: component_main, architecture: architecture_amd64, updated_at: '2020-01-24T10:54:59Z', file_sha256: 'b') } # destroyed, 1 second before last generation
- let_it_be(:component_file4) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_all, updated_at: '2020-01-24T10:55:00Z', file_sha256: 'c') } # kept, last generation
- let_it_be(:component_file5) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_all, updated_at: '2020-01-24T10:55:00Z', file_sha256: 'd') } # kept, last generation
- let_it_be(:component_file6) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_amd64, updated_at: '2020-01-25T15:17:18Z', file_sha256: 'e') } # kept, less than 1 hour ago
-
- def check_component_file(release_date, component_name, component_file_type, architecture_name, expected_content)
- component_file = distribution
- .component_files
- .with_component_name(component_name)
- .with_file_type(component_file_type)
- .with_architecture_name(architecture_name)
- .order_updated_asc
- .last
-
- expect(component_file).not_to be_nil
- expect(component_file.updated_at).to eq(release_date)
-
- unless expected_content.nil?
- component_file.file.use_file do |file_path|
- expect(File.read(file_path)).to eq(expected_content)
- end
+ def check_release_files(expected_release_content)
+ distribution.reload
+
+ distribution.file.use_file do |file_path|
+ expect(File.read(file_path)).to eq(expected_release_content)
+ end
+
+ expect(distribution.file_signature).to start_with("-----BEGIN PGP SIGNATURE-----\n")
+ expect(distribution.file_signature).to end_with("\n-----END PGP SIGNATURE-----\n")
+
+ distribution.signed_file.use_file do |file_path|
+ expect(File.read(file_path)).to start_with("-----BEGIN PGP SIGNED MESSAGE-----\nHash: SHA512\n\n#{expected_release_content}-----BEGIN PGP SIGNATURE-----\n")
+ expect(File.read(file_path)).to end_with("\n-----END PGP SIGNATURE-----\n")
end
end
- it 'generates Debian distribution and component files', :aggregate_failures do
- current_time = Time.utc(2020, 01, 25, 15, 17, 18, 123456)
-
- travel_to(current_time) do
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
-
- initial_count = 6
- destroyed_count = 2
- # updated_count = 1
- created_count = 5
-
- expect { subject }
- .to not_change { Packages::Package.count }
- .and not_change { Packages::PackageFile.count }
- .and change { distribution.reload.updated_at }.to(current_time.round)
- .and change { distribution.component_files.reset.count }.from(initial_count).to(initial_count - destroyed_count + created_count)
- .and change { component_file1.reload.updated_at }.to(current_time.round)
-
- debs = package.package_files.with_debian_file_type(:deb).preload_debian_file_metadata.to_a
- pool_prefix = "pool/unstable/#{project.id}/p/#{package.name}"
- expected_main_amd64_content = <<~EOF
- Package: libsample0
- Source: #{package.name}
- Version: #{package.version}
- Installed-Size: 7
- Maintainer: #{debs[0].debian_fields['Maintainer']}
- Architecture: amd64
- Description: Some mostly empty lib
- Used in GitLab tests.
- .
- Testing another paragraph.
- Multi-Arch: same
- Homepage: #{debs[0].debian_fields['Homepage']}
- Section: libs
- Priority: optional
- Filename: #{pool_prefix}/libsample0_1.2.3~alpha2_amd64.deb
- Size: 409600
- MD5sum: #{debs[0].file_md5}
- SHA256: #{debs[0].file_sha256}
-
- Package: sample-dev
- Source: #{package.name} (#{package.version})
- Version: 1.2.3~binary
- Installed-Size: 7
- Maintainer: #{debs[1].debian_fields['Maintainer']}
- Architecture: amd64
- Depends: libsample0 (= 1.2.3~binary)
- Description: Some mostly empty development files
- Used in GitLab tests.
- .
- Testing another paragraph.
- Multi-Arch: same
- Homepage: #{debs[1].debian_fields['Homepage']}
- Section: libdevel
- Priority: optional
- Filename: #{pool_prefix}/sample-dev_1.2.3~binary_amd64.deb
- Size: 409600
- MD5sum: #{debs[1].file_md5}
- SHA256: #{debs[1].file_sha256}
- EOF
-
- check_component_file(current_time.round, 'main', :packages, 'all', nil)
- check_component_file(current_time.round, 'main', :packages, 'amd64', expected_main_amd64_content)
- check_component_file(current_time.round, 'main', :packages, 'arm64', nil)
-
- check_component_file(current_time.round, 'contrib', :packages, 'all', nil)
- check_component_file(current_time.round, 'contrib', :packages, 'amd64', nil)
- check_component_file(current_time.round, 'contrib', :packages, 'arm64', nil)
-
- main_amd64_size = expected_main_amd64_content.length
- main_amd64_md5sum = Digest::MD5.hexdigest(expected_main_amd64_content)
- main_amd64_sha256 = Digest::SHA256.hexdigest(expected_main_amd64_content)
-
- contrib_all_size = component_file1.size
- contrib_all_md5sum = component_file1.file_md5
- contrib_all_sha256 = component_file1.file_sha256
-
- expected_release_content = <<~EOF
- Codename: unstable
- Date: Sat, 25 Jan 2020 15:17:18 +0000
- Valid-Until: Mon, 27 Jan 2020 15:17:18 +0000
- Architectures: all amd64 arm64
- Components: contrib main
- MD5Sum:
- #{contrib_all_md5sum} #{contrib_all_size} contrib/binary-all/Packages
- d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-amd64/Packages
- d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-arm64/Packages
- d41d8cd98f00b204e9800998ecf8427e 0 main/binary-all/Packages
- #{main_amd64_md5sum} #{main_amd64_size} main/binary-amd64/Packages
- d41d8cd98f00b204e9800998ecf8427e 0 main/binary-arm64/Packages
- SHA256:
- #{contrib_all_sha256} #{contrib_all_size} contrib/binary-all/Packages
- e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-amd64/Packages
- e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-arm64/Packages
- e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-all/Packages
- #{main_amd64_sha256} #{main_amd64_size} main/binary-amd64/Packages
- e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-arm64/Packages
- EOF
-
- distribution.file.use_file do |file_path|
- expect(File.read(file_path)).to eq(expected_release_content)
+ context 'with Debian components and architectures' do
+ let_it_be(:component_main) { create("debian_#{container_type}_component", distribution: distribution, name: 'main') }
+ let_it_be(:component_contrib) { create("debian_#{container_type}_component", distribution: distribution, name: 'contrib') }
+
+ let_it_be(:architecture_all) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'all') }
+ let_it_be(:architecture_amd64) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'amd64') }
+ let_it_be(:architecture_arm64) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'arm64') }
+
+ let_it_be(:component_file1) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_all, updated_at: '2020-01-24T08:00:00Z', file_sha256: 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', file_md5: 'd41d8cd98f00b204e9800998ecf8427e', file_fixture: nil, size: 0) } # updated
+ let_it_be(:component_file2) { create("debian_#{container_type}_component_file", component: component_main, architecture: architecture_all, updated_at: '2020-01-24T09:00:00Z', file_sha256: 'a') } # destroyed
+ let_it_be(:component_file3) { create("debian_#{container_type}_component_file", component: component_main, architecture: architecture_amd64, updated_at: '2020-01-24T10:54:59Z', file_sha256: 'b') } # destroyed, 1 second before last generation
+ let_it_be(:component_file4) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_all, updated_at: '2020-01-24T10:55:00Z', file_sha256: 'c') } # kept, last generation
+ let_it_be(:component_file5) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_all, updated_at: '2020-01-24T10:55:00Z', file_sha256: 'd') } # kept, last generation
+ let_it_be(:component_file6) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_amd64, updated_at: '2020-01-25T15:17:18Z', file_sha256: 'e') } # kept, less than 1 hour ago
+
+ def check_component_file(release_date, component_name, component_file_type, architecture_name, expected_content)
+ component_file = distribution
+ .component_files
+ .with_component_name(component_name)
+ .with_file_type(component_file_type)
+ .with_architecture_name(architecture_name)
+ .order_updated_asc
+ .last
+
+ expect(component_file).not_to be_nil
+ expect(component_file.updated_at).to eq(release_date)
+
+ unless expected_content.nil?
+ component_file.file.use_file do |file_path|
+ expect(File.read(file_path)).to eq(expected_content)
+ end
end
end
+
+ it 'generates Debian distribution and component files', :aggregate_failures do
+ current_time = Time.utc(2020, 01, 25, 15, 17, 18, 123456)
+
+ travel_to(current_time) do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ components_count = 2
+ architectures_count = 3
+
+ initial_count = 6
+ destroyed_count = 2
+ updated_count = 1
+ created_count = components_count * (architectures_count * 2 + 1) - updated_count
+
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and not_change { Packages::PackageFile.count }
+ .and change { distribution.reload.updated_at }.to(current_time.round)
+ .and change { distribution.component_files.reset.count }.from(initial_count).to(initial_count - destroyed_count + created_count)
+ .and change { component_file1.reload.updated_at }.to(current_time.round)
+
+ package_files = package.package_files.order(id: :asc).preload_debian_file_metadata.to_a
+ pool_prefix = 'pool/unstable'
+ pool_prefix += "/#{project.id}" if container_type == :group
+ pool_prefix += "/p/#{package.name}/#{package.version}"
+ expected_main_amd64_content = <<~EOF
+ Package: libsample0
+ Source: #{package.name}
+ Version: #{package.version}
+ Installed-Size: 7
+ Maintainer: #{package_files[2].debian_fields['Maintainer']}
+ Architecture: amd64
+ Description: Some mostly empty lib
+ Used in GitLab tests.
+ .
+ Testing another paragraph.
+ Multi-Arch: same
+ Homepage: #{package_files[2].debian_fields['Homepage']}
+ Section: libs
+ Priority: optional
+ Filename: #{pool_prefix}/libsample0_1.2.3~alpha2_amd64.deb
+ Size: 409600
+ MD5sum: #{package_files[2].file_md5}
+ SHA256: #{package_files[2].file_sha256}
+
+ Package: sample-dev
+ Source: #{package.name} (#{package.version})
+ Version: 1.2.3~binary
+ Installed-Size: 7
+ Maintainer: #{package_files[3].debian_fields['Maintainer']}
+ Architecture: amd64
+ Depends: libsample0 (= 1.2.3~binary)
+ Description: Some mostly empty development files
+ Used in GitLab tests.
+ .
+ Testing another paragraph.
+ Multi-Arch: same
+ Homepage: #{package_files[3].debian_fields['Homepage']}
+ Section: libdevel
+ Priority: optional
+ Filename: #{pool_prefix}/sample-dev_1.2.3~binary_amd64.deb
+ Size: 409600
+ MD5sum: #{package_files[3].file_md5}
+ SHA256: #{package_files[3].file_sha256}
+ EOF
+
+ expected_main_amd64_di_content = <<~EOF
+ Section: misc
+ Priority: extra
+ Filename: #{pool_prefix}/sample-udeb_1.2.3~alpha2_amd64.udeb
+ Size: 409600
+ MD5sum: #{package_files[4].file_md5}
+ SHA256: #{package_files[4].file_sha256}
+ EOF
+
+ expected_main_source_content = <<~EOF
+ Package: #{package.name}
+ Binary: sample-dev, libsample0, sample-udeb
+ Version: #{package.version}
+ Maintainer: #{package_files[1].debian_fields['Maintainer']}
+ Build-Depends: debhelper-compat (= 13)
+ Architecture: any
+ Standards-Version: 4.5.0
+ Format: 3.0 (native)
+ Files:
+ #{package_files[1].file_md5} #{package_files[1].size} #{package_files[1].file_name}
+ d5ca476e4229d135a88f9c729c7606c9 864 sample_1.2.3~alpha2.tar.xz
+ Checksums-Sha256:
+ #{package_files[1].file_sha256} #{package_files[1].size} #{package_files[1].file_name}
+ 40e4682bb24a73251ccd7c7798c0094a649091e5625d6a14bcec9b4e7174f3da 864 sample_1.2.3~alpha2.tar.xz
+ Checksums-Sha1:
+ #{package_files[1].file_sha1} #{package_files[1].size} #{package_files[1].file_name}
+ c5cfc111ea924842a89a06d5673f07dfd07de8ca 864 sample_1.2.3~alpha2.tar.xz
+ Homepage: #{package_files[1].debian_fields['Homepage']}
+ Section: misc
+ Priority: extra
+ Directory: #{pool_prefix}
+ EOF
+
+ check_component_file(current_time.round, 'main', :packages, 'all', nil)
+ check_component_file(current_time.round, 'main', :packages, 'amd64', expected_main_amd64_content)
+ check_component_file(current_time.round, 'main', :packages, 'arm64', nil)
+
+ check_component_file(current_time.round, 'main', :di_packages, 'all', nil)
+ check_component_file(current_time.round, 'main', :di_packages, 'amd64', expected_main_amd64_di_content)
+ check_component_file(current_time.round, 'main', :di_packages, 'arm64', nil)
+
+ check_component_file(current_time.round, 'main', :source, nil, expected_main_source_content)
+
+ check_component_file(current_time.round, 'contrib', :packages, 'all', nil)
+ check_component_file(current_time.round, 'contrib', :packages, 'amd64', nil)
+ check_component_file(current_time.round, 'contrib', :packages, 'arm64', nil)
+
+ check_component_file(current_time.round, 'contrib', :di_packages, 'all', nil)
+ check_component_file(current_time.round, 'contrib', :di_packages, 'amd64', nil)
+ check_component_file(current_time.round, 'contrib', :di_packages, 'arm64', nil)
+
+ check_component_file(current_time.round, 'contrib', :source, nil, nil)
+
+ main_amd64_size = expected_main_amd64_content.length
+ main_amd64_md5sum = Digest::MD5.hexdigest(expected_main_amd64_content)
+ main_amd64_sha256 = Digest::SHA256.hexdigest(expected_main_amd64_content)
+
+ contrib_all_size = component_file1.size
+ contrib_all_md5sum = component_file1.file_md5
+ contrib_all_sha256 = component_file1.file_sha256
+
+ main_amd64_di_size = expected_main_amd64_di_content.length
+ main_amd64_di_md5sum = Digest::MD5.hexdigest(expected_main_amd64_di_content)
+ main_amd64_di_sha256 = Digest::SHA256.hexdigest(expected_main_amd64_di_content)
+
+ main_source_size = expected_main_source_content.length
+ main_source_md5sum = Digest::MD5.hexdigest(expected_main_source_content)
+ main_source_sha256 = Digest::SHA256.hexdigest(expected_main_source_content)
+
+ expected_release_content = <<~EOF
+ Codename: unstable
+ Date: Sat, 25 Jan 2020 15:17:18 +0000
+ Valid-Until: Mon, 27 Jan 2020 15:17:18 +0000
+ Architectures: all amd64 arm64
+ Components: contrib main
+ MD5Sum:
+ #{contrib_all_md5sum} #{contrib_all_size} contrib/binary-all/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 contrib/debian-installer/binary-all/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-amd64/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 contrib/debian-installer/binary-amd64/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-arm64/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 contrib/debian-installer/binary-arm64/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 contrib/source/Source
+ d41d8cd98f00b204e9800998ecf8427e 0 main/binary-all/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-all/Packages
+ #{main_amd64_md5sum} #{main_amd64_size} main/binary-amd64/Packages
+ #{main_amd64_di_md5sum} #{main_amd64_di_size} main/debian-installer/binary-amd64/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 main/binary-arm64/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-arm64/Packages
+ #{main_source_md5sum} #{main_source_size} main/source/Source
+ SHA256:
+ #{contrib_all_sha256} #{contrib_all_size} contrib/binary-all/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/debian-installer/binary-all/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-amd64/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/debian-installer/binary-amd64/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-arm64/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/debian-installer/binary-arm64/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/source/Source
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-all/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/debian-installer/binary-all/Packages
+ #{main_amd64_sha256} #{main_amd64_size} main/binary-amd64/Packages
+ #{main_amd64_di_sha256} #{main_amd64_di_size} main/debian-installer/binary-amd64/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-arm64/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/debian-installer/binary-arm64/Packages
+ #{main_source_sha256} #{main_source_size} main/source/Source
+ EOF
+
+ check_release_files(expected_release_content)
+ end
+
+ create_list(:debian_package, 10, project: project, published_in: project_distribution)
+ control_count = ActiveRecord::QueryRecorder.new { subject2 }.count
+
+ create_list(:debian_package, 10, project: project, published_in: project_distribution)
+ expect { subject3 }.not_to exceed_query_limit(control_count)
+ end
end
-end
-RSpec.shared_examples 'Generate minimal Debian Distribution' do
- it 'generates minimal distribution', :aggregate_failures do
- travel_to(Time.utc(2020, 01, 25, 15, 17, 18, 123456)) do
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
-
- expect { subject }
- .to not_change { Packages::Package.count }
- .and not_change { Packages::PackageFile.count }
- .and not_change { distribution.component_files.reset.count }
-
- expected_release_content = <<~EOF
- Codename: unstable
- Date: Sat, 25 Jan 2020 15:17:18 +0000
- Valid-Until: Mon, 27 Jan 2020 15:17:18 +0000
- MD5Sum:
- SHA256:
- EOF
-
- distribution.file.use_file do |file_path|
- expect(File.read(file_path)).to eq(expected_release_content)
+ context 'without components and architectures' do
+ it 'generates minimal distribution', :aggregate_failures do
+ travel_to(Time.utc(2020, 01, 25, 15, 17, 18, 123456)) do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and not_change { Packages::PackageFile.count }
+ .and not_change { distribution.component_files.reset.count }
+
+ expected_release_content = <<~EOF
+ Codename: unstable
+ Date: Sat, 25 Jan 2020 15:17:18 +0000
+ Valid-Until: Mon, 27 Jan 2020 15:17:18 +0000
+ MD5Sum:
+ SHA256:
+ EOF
+
+ check_release_files(expected_release_content)
end
end
end
diff --git a/spec/support_specs/database/prevent_cross_database_modification_spec.rb b/spec/support_specs/database/prevent_cross_database_modification_spec.rb
new file mode 100644
index 00000000000..4fd55d59db0
--- /dev/null
+++ b/spec/support_specs/database/prevent_cross_database_modification_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Database::PreventCrossDatabaseModification' do
+ let_it_be(:pipeline, refind: true) { create(:ci_pipeline) }
+ let_it_be(:project, refind: true) { create(:project) }
+
+ shared_examples 'succeessful examples' do
+ context 'outside transaction' do
+ it { expect { run_queries }.not_to raise_error }
+ end
+
+ context 'within transaction' do
+ it do
+ Project.transaction do
+ expect { run_queries }.not_to raise_error
+ end
+ end
+ end
+
+ context 'within nested transaction' do
+ it do
+ Project.transaction(requires_new: true) do
+ Project.transaction(requires_new: true) do
+ expect { run_queries }.not_to raise_error
+ end
+ end
+ end
+ end
+ end
+
+ context 'when CI and other tables are read in a transaction' do
+ def run_queries
+ pipeline.reload
+ project.reload
+ end
+
+ include_examples 'succeessful examples'
+ end
+
+ context 'when only CI data is modified' do
+ def run_queries
+ pipeline.touch
+ project.reload
+ end
+
+ include_examples 'succeessful examples'
+ end
+
+ context 'when other data is modified' do
+ def run_queries
+ pipeline.reload
+ project.touch
+ end
+
+ include_examples 'succeessful examples'
+ end
+
+ describe 'with_cross_database_modification_prevented block' do
+ it 'raises error when CI and other data is modified' do
+ expect do
+ with_cross_database_modification_prevented do
+ Project.transaction do
+ project.touch
+ pipeline.touch
+ end
+ end
+ end.to raise_error /Cross-database data modification queries/
+ end
+ end
+
+ context 'when running tests with prevent_cross_database_modification', :prevent_cross_database_modification do
+ context 'when both CI and other data is modified' do
+ def run_queries
+ project.touch
+ pipeline.touch
+ end
+
+ context 'outside transaction' do
+ it { expect { run_queries }.not_to raise_error }
+ end
+
+ context 'when data modification happens in a transaction' do
+ it 'raises error' do
+ Project.transaction do
+ expect { run_queries }.to raise_error /Cross-database data modification queries/
+ end
+ end
+
+ context 'when data modification happens in nested transactions' do
+ it 'raises error' do
+ Project.transaction(requires_new: true) do
+ project.touch
+ Project.transaction(requires_new: true) do
+ expect { pipeline.touch }.to raise_error /Cross-database data modification queries/
+ end
+ end
+ end
+ end
+ end
+ end
+
+ context 'when CI association is modified through project' do
+ def run_queries
+ project.variables.build(key: 'a', value: 'v')
+ project.save!
+ end
+
+ include_examples 'succeessful examples'
+ end
+
+ describe '#allow_cross_database_modification_within_transaction' do
+ it 'skips raising error' do
+ expect do
+ Gitlab::Database.allow_cross_database_modification_within_transaction(url: 'gitlab-issue') do
+ Project.transaction do
+ pipeline.touch
+ project.touch
+ end
+ end
+ end.not_to raise_error
+ end
+
+ it 'raises error when complex factories are built referencing both databases' do
+ expect do
+ ApplicationRecord.transaction do
+ create(:ci_pipeline)
+ end
+ end.to raise_error /Cross-database data modification queries/
+ end
+
+ it 'skips raising error on factory creation' do
+ expect do
+ Gitlab::Database.allow_cross_database_modification_within_transaction(url: 'gitlab-issue') do
+ ApplicationRecord.transaction do
+ create(:ci_pipeline)
+ end
+ end
+ end.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/support_specs/database/prevent_cross_joins_spec.rb b/spec/support_specs/database/prevent_cross_joins_spec.rb
new file mode 100644
index 00000000000..dd4ed9c40b8
--- /dev/null
+++ b/spec/support_specs/database/prevent_cross_joins_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::PreventCrossJoins do
+ context 'when running in :prevent_cross_joins scope', :prevent_cross_joins do
+ context 'when only non-CI tables are used' do
+ it 'does not raise exception' do
+ expect { main_only_query }.not_to raise_error
+ end
+ end
+
+ context 'when only CI tables are used' do
+ it 'does not raise exception' do
+ expect { ci_only_query }.not_to raise_error
+ end
+ end
+
+ context 'when CI and non-CI tables are used' do
+ it 'raises exception' do
+ expect { main_and_ci_query }.to raise_error(
+ described_class::CrossJoinAcrossUnsupportedTablesError)
+ end
+
+ context 'when allow_cross_joins_across_databases is used' do
+ it 'does not raise exception' do
+ Gitlab::Database.allow_cross_joins_across_databases(url: 'http://issue-url')
+
+ expect { main_and_ci_query }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ context 'when running in a default scope' do
+ context 'when CI and non-CI tables are used' do
+ it 'does not raise exception' do
+ expect { main_and_ci_query }.not_to raise_error
+ end
+ end
+ end
+
+ private
+
+ def main_only_query
+ Issue.joins(:project).last
+ end
+
+ def ci_only_query
+ Ci::Build.joins(:pipeline).last
+ end
+
+ def main_and_ci_query
+ Ci::Build.joins(:project).last
+ end
+end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index ebaaf179546..99deaa8d154 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -412,6 +412,16 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
end
end
+
+ context 'CRON env is set' do
+ before do
+ stub_env('CRON', '1')
+ end
+
+ it 'does not output to stdout' do
+ expect { run_rake_task('gitlab:backup:create') }.not_to output.to_stdout_from_any_process
+ end
+ end
end
# backup_create task
@@ -480,8 +490,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
'lfs.tar.gz',
'pages.tar.gz',
'registry.tar.gz',
- 'repositories',
- 'tmp'
+ 'repositories'
)
end
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 03fbd238ee9..8e98a42510e 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -201,9 +201,36 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
let(:reindex) { double('reindex') }
let(:indexes) { double('indexes') }
+ it 'cleans up any leftover indexes' do
+ expect(Gitlab::Database::Reindexing).to receive(:cleanup_leftovers!)
+
+ run_rake_task('gitlab:db:reindex')
+ end
+
+ context 'when async index creation is enabled' do
+ it 'executes async index creation prior to any reindexing actions' do
+ stub_feature_flags(database_async_index_creation: true)
+
+ expect(Gitlab::Database::AsyncIndexes).to receive(:create_pending_indexes!).ordered
+ expect(Gitlab::Database::Reindexing).to receive(:perform).ordered
+
+ run_rake_task('gitlab:db:reindex')
+ end
+ end
+
+ context 'when async index creation is disabled' do
+ it 'does not execute async index creation' do
+ stub_feature_flags(database_async_index_creation: false)
+
+ expect(Gitlab::Database::AsyncIndexes).not_to receive(:create_pending_indexes!)
+
+ run_rake_task('gitlab:db:reindex')
+ end
+ end
+
context 'when no index_name is given' do
it 'uses all candidate indexes' do
- expect(Gitlab::Database::Reindexing).to receive(:candidate_indexes).and_return(indexes)
+ expect(Gitlab::Database::PostgresIndex).to receive(:reindexing_support).and_return(indexes)
expect(Gitlab::Database::Reindexing).to receive(:perform).with(indexes)
run_rake_task('gitlab:db:reindex')
@@ -214,7 +241,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
let(:index) { double('index') }
before do
- allow(Gitlab::Database::Reindexing).to receive(:candidate_indexes).and_return(indexes)
+ allow(Gitlab::Database::PostgresIndex).to receive(:reindexing_support).and_return(indexes)
end
it 'calls the index rebuilder with the proper arguments' do
@@ -270,8 +297,8 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
let(:ctx) { double('ctx', migrations: all_migrations, schema_migration: double, get_all_versions: existing_versions) }
let(:instrumentation) { instance_double(Gitlab::Database::Migrations::Instrumentation, observations: observations) }
let(:existing_versions) { [1] }
- let(:all_migrations) { [double('migration1', version: 1), pending_migration] }
- let(:pending_migration) { double('migration2', version: 2) }
+ let(:all_migrations) { [double('migration1', version: 1, name: 'test'), pending_migration] }
+ let(:pending_migration) { double('migration2', version: 2, name: 'test') }
let(:filename) { Gitlab::Database::Migrations::Instrumentation::STATS_FILENAME }
let(:result_dir) { Dir.mktmpdir }
let(:observations) { %w[some data] }
@@ -297,7 +324,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
end
it 'instruments the pending migration' do
- expect(instrumentation).to receive(:observe).with(2).and_yield
+ expect(instrumentation).to receive(:observe).with(version: 2, name: 'test').and_yield
subject
end
diff --git a/spec/tasks/gitlab/ldap_rake_spec.rb b/spec/tasks/gitlab/ldap_rake_spec.rb
index 693bb7826a3..b18c2c88a46 100644
--- a/spec/tasks/gitlab/ldap_rake_spec.rb
+++ b/spec/tasks/gitlab/ldap_rake_spec.rb
@@ -38,13 +38,13 @@ RSpec.describe 'gitlab:ldap:secret rake tasks' do
it 'displays error when key does not exist' do
Settings.encrypted(ldap_secret_file).write('somevalue')
allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(nil)
- expect { run_rake_task('gitlab:ldap:secret:show') }.to output(/Missing encryption key encrypted_settings_key_base./).to_stdout
+ expect { run_rake_task('gitlab:ldap:secret:show') }.to output(/Missing encryption key encrypted_settings_key_base./).to_stderr
end
it 'displays error when key is changed' do
Settings.encrypted(ldap_secret_file).write('somevalue')
allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(SecureRandom.hex(64))
- expect { run_rake_task('gitlab:ldap:secret:show') }.to output(/Couldn't decrypt .* Perhaps you passed the wrong key?/).to_stdout
+ expect { run_rake_task('gitlab:ldap:secret:show') }.to output(/Couldn't decrypt .* Perhaps you passed the wrong key?/).to_stderr
end
it 'outputs the unencrypted content when present' do
@@ -64,18 +64,18 @@ RSpec.describe 'gitlab:ldap:secret rake tasks' do
it 'displays error when key does not exist' do
allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(nil)
- expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/Missing encryption key encrypted_settings_key_base./).to_stdout
+ expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/Missing encryption key encrypted_settings_key_base./).to_stderr
end
it 'displays error when key is changed' do
Settings.encrypted(ldap_secret_file).write('somevalue')
allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(SecureRandom.hex(64))
- expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/Couldn't decrypt .* Perhaps you passed the wrong key?/).to_stdout
+ expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/Couldn't decrypt .* Perhaps you passed the wrong key?/).to_stderr
end
it 'displays error when write directory does not exist' do
FileUtils.rm_rf(Rails.root.join('tmp/tests/ldapenc'))
- expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/Directory .* does not exist./).to_stdout
+ expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/Directory .* does not exist./).to_stderr
end
it 'shows a warning when content is invalid' do
@@ -87,7 +87,7 @@ RSpec.describe 'gitlab:ldap:secret rake tasks' do
it 'displays error when $EDITOR is not set' do
stub_env('EDITOR', nil)
- expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/No \$EDITOR specified to open file. Please provide one when running the command/).to_stdout
+ expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/No \$EDITOR specified to open file. Please provide one when running the command/).to_stderr
end
end
@@ -106,12 +106,12 @@ RSpec.describe 'gitlab:ldap:secret rake tasks' do
it 'displays error when key does not exist' do
allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(nil)
- expect { run_rake_task('gitlab:ldap:secret:write') }.to output(/Missing encryption key encrypted_settings_key_base./).to_stdout
+ expect { run_rake_task('gitlab:ldap:secret:write') }.to output(/Missing encryption key encrypted_settings_key_base./).to_stderr
end
it 'displays error when write directory does not exist' do
FileUtils.rm_rf('tmp/tests/ldapenc/')
- expect { run_rake_task('gitlab:ldap:secret:write') }.to output(/Directory .* does not exist./).to_stdout
+ expect { run_rake_task('gitlab:ldap:secret:write') }.to output(/Directory .* does not exist./).to_stderr
end
it 'shows a warning when content is invalid' do
diff --git a/spec/tasks/gitlab/product_intelligence_rake_spec.rb b/spec/tasks/gitlab/product_intelligence_rake_spec.rb
new file mode 100644
index 00000000000..029e181ad06
--- /dev/null
+++ b/spec/tasks/gitlab/product_intelligence_rake_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:product_intelligence:activate_metrics', :silence_stdout do
+ def fake_metric(key_path, milestone: 'test_milestone', status: 'implemented')
+ Gitlab::Usage::MetricDefinition.new(key_path, { key_path: key_path, milestone: milestone, status: status })
+ end
+
+ before do
+ Rake.application.rake_require 'tasks/gitlab/product_intelligence'
+ stub_warn_user_is_not_gitlab
+ end
+
+ describe 'activate_metrics' do
+ it 'fails if the MILESTONE env var is not set' do
+ stub_env('MILESTONE' => nil)
+
+ expect { run_rake_task('gitlab:product_intelligence:activate_metrics') }.to raise_error(RuntimeError, 'Please supply the MILESTONE env var')
+ end
+
+ context 'with MILESTONE env var' do
+ subject do
+ updated_metrics = []
+
+ file = double('file')
+ allow(file).to receive(:<<) { |contents| updated_metrics << YAML.safe_load(contents) }
+ allow(File).to receive(:open).and_yield(file)
+
+ stub_env('MILESTONE' => 'test_milestone')
+ run_rake_task('gitlab:product_intelligence:activate_metrics')
+
+ updated_metrics
+ end
+
+ let(:metric_definitions) do
+ {
+ matching_metric: fake_metric('matching_metric'),
+ matching_metric2: fake_metric('matching_metric2'),
+ other_status_metric: fake_metric('other_status_metric', status: 'deprecated'),
+ other_milestone_metric: fake_metric('other_milestone_metric', milestone: 'other_milestone')
+ }
+ end
+
+ before do
+ allow(Gitlab::Usage::MetricDefinition).to receive(:definitions).and_return(metric_definitions)
+ end
+
+ context 'with metric matching status and milestone' do
+ it 'updates matching_metric yaml file' do
+ expect(subject).to eq([
+ { 'key_path' => 'matching_metric', 'milestone' => 'test_milestone', 'status' => 'data_available' },
+ { 'key_path' => 'matching_metric2', 'milestone' => 'test_milestone', 'status' => 'data_available' }
+ ])
+ end
+ end
+
+ context 'without metrics definitions' do
+ let(:metric_definitions) { {} }
+
+ it 'runs successfully with no updates' do
+ expect(subject).to eq([])
+ end
+ end
+
+ context 'without matching metrics' do
+ let(:metric_definitions) do
+ {
+ other_status_metric: fake_metric('other_status_metric', status: 'deprecated'),
+ other_milestone_metric: fake_metric('other_milestone_metric', milestone: 'other_milestone')
+ }
+ end
+
+ it 'runs successfully with no updates' do
+ expect(subject).to eq([])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/smtp_rake_spec.rb b/spec/tasks/gitlab/smtp_rake_spec.rb
new file mode 100644
index 00000000000..572df8421d5
--- /dev/null
+++ b/spec/tasks/gitlab/smtp_rake_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:smtp:secret rake tasks' do
+ let(:smtp_secret_file) { 'tmp/tests/smtpenc/smtp_secret.yaml.enc' }
+
+ before do
+ Rake.application.rake_require 'tasks/gitlab/smtp'
+ stub_env('EDITOR', 'cat')
+ stub_warn_user_is_not_gitlab
+ FileUtils.mkdir_p('tmp/tests/smtpenc/')
+ allow(Gitlab.config.gitlab).to receive(:email_smtp_secret_file).and_return(smtp_secret_file)
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(SecureRandom.hex(64))
+ end
+
+ after do
+ FileUtils.rm_rf(Rails.root.join('tmp/tests/smtpenc'))
+ end
+
+ describe ':show' do
+ it 'displays error when file does not exist' do
+ expect { run_rake_task('gitlab:smtp:secret:show') }.to output(/File .* does not exist. Use `gitlab-rake gitlab:smtp:secret:edit` to change that./).to_stdout
+ end
+
+ it 'displays error when key does not exist' do
+ Settings.encrypted(smtp_secret_file).write('somevalue')
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(nil)
+ expect { run_rake_task('gitlab:smtp:secret:show') }.to output(/Missing encryption key encrypted_settings_key_base./).to_stderr
+ end
+
+ it 'displays error when key is changed' do
+ Settings.encrypted(smtp_secret_file).write('somevalue')
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(SecureRandom.hex(64))
+ expect { run_rake_task('gitlab:smtp:secret:show') }.to output(/Couldn't decrypt .* Perhaps you passed the wrong key?/).to_stderr
+ end
+
+ it 'outputs the unencrypted content when present' do
+ encrypted = Settings.encrypted(smtp_secret_file)
+ encrypted.write('somevalue')
+ expect { run_rake_task('gitlab:smtp:secret:show') }.to output(/somevalue/).to_stdout
+ end
+ end
+
+ describe 'edit' do
+ it 'creates encrypted file' do
+ expect { run_rake_task('gitlab:smtp:secret:edit') }.to output(/File encrypted and saved./).to_stdout
+ expect(File.exist?(smtp_secret_file)).to be true
+ value = Settings.encrypted(smtp_secret_file)
+ expect(value.read).to match(/password: '123'/)
+ end
+
+ it 'displays error when key does not exist' do
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(nil)
+ expect { run_rake_task('gitlab:smtp:secret:edit') }.to output(/Missing encryption key encrypted_settings_key_base./).to_stderr
+ end
+
+ it 'displays error when key is changed' do
+ Settings.encrypted(smtp_secret_file).write('somevalue')
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(SecureRandom.hex(64))
+ expect { run_rake_task('gitlab:smtp:secret:edit') }.to output(/Couldn't decrypt .* Perhaps you passed the wrong key?/).to_stderr
+ end
+
+ it 'displays error when write directory does not exist' do
+ FileUtils.rm_rf(Rails.root.join('tmp/tests/smtpenc'))
+ expect { run_rake_task('gitlab:smtp:secret:edit') }.to output(/Directory .* does not exist./).to_stderr
+ end
+
+ it 'shows a warning when content is invalid' do
+ Settings.encrypted(smtp_secret_file).write('somevalue')
+ expect { run_rake_task('gitlab:smtp:secret:edit') }.to output(/WARNING: Content was not a valid SMTP secret yml file/).to_stdout
+ value = Settings.encrypted(smtp_secret_file)
+ expect(value.read).to match(/somevalue/)
+ end
+
+ it 'displays error when $EDITOR is not set' do
+ stub_env('EDITOR', nil)
+ expect { run_rake_task('gitlab:smtp:secret:edit') }.to output(/No \$EDITOR specified to open file. Please provide one when running the command/).to_stderr
+ end
+ end
+
+ describe 'write' do
+ before do
+ allow($stdin).to receive(:tty?).and_return(false)
+ allow($stdin).to receive(:read).and_return('username: foo')
+ end
+
+ it 'creates encrypted file from stdin' do
+ expect { run_rake_task('gitlab:smtp:secret:write') }.to output(/File encrypted and saved./).to_stdout
+ expect(File.exist?(smtp_secret_file)).to be true
+ value = Settings.encrypted(smtp_secret_file)
+ expect(value.read).to match(/username: foo/)
+ end
+
+ it 'displays error when key does not exist' do
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(nil)
+ expect { run_rake_task('gitlab:smtp:secret:write') }.to output(/Missing encryption key encrypted_settings_key_base./).to_stderr
+ end
+
+ it 'displays error when write directory does not exist' do
+ FileUtils.rm_rf('tmp/tests/smtpenc/')
+ expect { run_rake_task('gitlab:smtp:secret:write') }.to output(/Directory .* does not exist./).to_stderr
+ end
+
+ it 'shows a warning when content is invalid' do
+ Settings.encrypted(smtp_secret_file).write('somevalue')
+ expect { run_rake_task('gitlab:smtp:secret:edit') }.to output(/WARNING: Content was not a valid SMTP secret yml file/).to_stdout
+ value = Settings.encrypted(smtp_secret_file)
+ expect(value.read).to match(/somevalue/)
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/storage_rake_spec.rb b/spec/tasks/gitlab/storage_rake_spec.rb
index fefcd05af3b..570f67c8bb7 100644
--- a/spec/tasks/gitlab/storage_rake_spec.rb
+++ b/spec/tasks/gitlab/storage_rake_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe 'rake gitlab:storage:*', :silence_stdout do
shared_examples 'wait until database is ready' do
it 'checks if the database is ready once' do
- expect(Gitlab::Database).to receive(:exists?).once
+ expect(Gitlab::Database.main).to receive(:exists?).once
run_rake_task(task)
end
@@ -102,7 +102,7 @@ RSpec.describe 'rake gitlab:storage:*', :silence_stdout do
end
it 'tries for 3 times, polling every 0.1 seconds' do
- expect(Gitlab::Database).to receive(:exists?).exactly(3).times.and_return(false)
+ expect(Gitlab::Database.main).to receive(:exists?).exactly(3).times.and_return(false)
run_rake_task(task)
end
diff --git a/spec/tooling/danger/product_intelligence_spec.rb b/spec/tooling/danger/product_intelligence_spec.rb
index 17ef67e64fe..4ab911b6590 100644
--- a/spec/tooling/danger/product_intelligence_spec.rb
+++ b/spec/tooling/danger/product_intelligence_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Tooling::Danger::ProductIntelligence do
subject(:product_intelligence) { fake_danger.new(helper: fake_helper) }
let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
- let(:changed_files) { ['metrics/counts_7d/test_metric.yml', 'doc/development/usage_ping/dictionary.md'] }
+ let(:changed_files) { ['metrics/counts_7d/test_metric.yml'] }
let(:changed_lines) { ['+tier: ee'] }
before do
@@ -20,22 +20,6 @@ RSpec.describe Tooling::Danger::ProductIntelligence do
allow(fake_helper).to receive(:changed_lines).and_return(changed_lines)
end
- describe '#need_dictionary_changes?' do
- subject { product_intelligence.need_dictionary_changes? }
-
- context 'when changed files do not contain dictionary changes' do
- let(:changed_files) { ['config/metrics/counts_7d/test_metric.yml'] }
-
- it { is_expected.to be true }
- end
-
- context 'when changed files already contains dictionary changes' do
- let(:changed_files) { ['doc/development/usage_ping/dictionary.md'] }
-
- it { is_expected.to be false }
- end
- end
-
describe '#missing_labels' do
subject { product_intelligence.missing_labels }
@@ -109,10 +93,6 @@ RSpec.describe Tooling::Danger::ProductIntelligence do
end
end
- context 'with dictionary file not changed' do
- it { is_expected.to be_empty }
- end
-
context 'with metrics files changed' do
let(:changed_files) { ['config/metrics/counts_7d/test_metric.yml', 'ee/config/metrics/counts_7d/ee_metric.yml'] }
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index 8bcfac5a699..f52c5e02544 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'ee/spec/frontend/bar' | [:frontend]
'ee/spec/frontend_integration/bar' | [:frontend]
- '.gitlab/ci/frontend.gitlab-ci.yml' | %i[frontend engineering_productivity]
+ '.gitlab/ci/frontend.gitlab-ci.yml' | %i[frontend tooling]
'app/models/foo' | [:backend]
'bin/foo' | [:backend]
@@ -113,22 +113,22 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'Rakefile' | [:backend]
'FOO_VERSION' | [:backend]
- 'Dangerfile' | [:engineering_productivity]
- 'danger/bundle_size/Dangerfile' | [:engineering_productivity]
- 'ee/danger/bundle_size/Dangerfile' | [:engineering_productivity]
- 'danger/bundle_size/' | [:engineering_productivity]
- 'ee/danger/bundle_size/' | [:engineering_productivity]
- '.gitlab-ci.yml' | [:engineering_productivity]
- '.gitlab/ci/cng.gitlab-ci.yml' | [:engineering_productivity]
- '.gitlab/ci/ee-specific-checks.gitlab-ci.yml' | [:engineering_productivity]
- 'scripts/foo' | [:engineering_productivity]
- 'tooling/danger/foo' | [:engineering_productivity]
- 'ee/tooling/danger/foo' | [:engineering_productivity]
- 'lefthook.yml' | [:engineering_productivity]
- '.editorconfig' | [:engineering_productivity]
- 'tooling/bin/find_foss_tests' | [:engineering_productivity]
- '.codeclimate.yml' | [:engineering_productivity]
- '.gitlab/CODEOWNERS' | [:engineering_productivity]
+ 'Dangerfile' | [:tooling]
+ 'danger/bundle_size/Dangerfile' | [:tooling]
+ 'ee/danger/bundle_size/Dangerfile' | [:tooling]
+ 'danger/bundle_size/' | [:tooling]
+ 'ee/danger/bundle_size/' | [:tooling]
+ '.gitlab-ci.yml' | [:tooling]
+ '.gitlab/ci/cng.gitlab-ci.yml' | [:tooling]
+ '.gitlab/ci/ee-specific-checks.gitlab-ci.yml' | [:tooling]
+ 'scripts/foo' | [:tooling]
+ 'tooling/danger/foo' | [:tooling]
+ 'ee/tooling/danger/foo' | [:tooling]
+ 'lefthook.yml' | [:tooling]
+ '.editorconfig' | [:tooling]
+ 'tooling/bin/find_foss_tests' | [:tooling]
+ '.codeclimate.yml' | [:tooling]
+ '.gitlab/CODEOWNERS' | [:tooling]
'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | [:ci_template]
'lib/gitlab/ci/templates/dotNET-Core.yml' | [:ci_template]
@@ -177,7 +177,6 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'ee/config/metrics/counts_7d/20210216174919_g_analytics_issues_weekly.yml' | [:product_intelligence]
'lib/gitlab/usage_data_counters/aggregated_metrics/common.yml' | [:product_intelligence]
'lib/gitlab/usage_data_counters/hll_redis_counter.rb' | [:backend, :product_intelligence]
- 'doc/development/usage_ping/dictionary.md' | [:docs, :product_intelligence]
'lib/gitlab/tracking.rb' | [:backend, :product_intelligence]
'spec/lib/gitlab/tracking_spec.rb' | [:backend, :product_intelligence]
'app/helpers/tracking_helper.rb' | [:backend, :product_intelligence]
diff --git a/spec/tooling/graphql/docs/renderer_spec.rb b/spec/tooling/graphql/docs/renderer_spec.rb
index 50ebb754ca4..de5ec928921 100644
--- a/spec/tooling/graphql/docs/renderer_spec.rb
+++ b/spec/tooling/graphql/docs/renderer_spec.rb
@@ -14,13 +14,13 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
let(:template) { Rails.root.join('tooling/graphql/docs/templates/default.md.haml') }
let(:field_description) { 'List of objects.' }
- let(:type) { ::GraphQL::INT_TYPE }
+ let(:type) { ::GraphQL::Types::Int }
let(:query_type) do
Class.new(Types::BaseObject) { graphql_name 'Query' }.tap do |t|
# this keeps type and field_description in scope.
t.field :foo, type, null: true, description: field_description do
- argument :id, GraphQL::ID_TYPE, required: false, description: 'ID of the object.'
+ argument :id, GraphQL::Types::ID, required: false, description: 'ID of the object.'
end
end
end
@@ -73,7 +73,7 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
Class.new(Types::BaseObject) do
graphql_name 'ArrayTest'
- field :foo, [GraphQL::STRING_TYPE], null: false, description: 'A description.'
+ field :foo, [GraphQL::Types::String], null: false, description: 'A description.'
end
end
@@ -129,8 +129,8 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
Class.new(Types::BaseObject) do
graphql_name 'OrderingTest'
- field :foo, GraphQL::STRING_TYPE, null: false, description: 'A description of foo field.'
- field :bar, GraphQL::STRING_TYPE, null: false, description: 'A description of bar field.'
+ field :foo, GraphQL::Types::String, null: false, description: 'A description of foo field.'
+ field :bar, GraphQL::Types::String, null: false, description: 'A description of bar field.'
end
end
@@ -154,7 +154,7 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
let(:type) do
wibble = Class.new(::Types::BaseObject) do
graphql_name 'Wibble'
- field :x, ::GraphQL::INT_TYPE, null: false
+ field :x, ::GraphQL::Types::Int, null: false
end
Class.new(Types::BaseObject) do
@@ -162,16 +162,16 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
description 'Testing doc refs'
field :foo,
- type: GraphQL::STRING_TYPE,
+ type: GraphQL::Types::String,
null: false,
description: 'The foo.',
see: { 'A list of foos' => 'https://example.com/foos' }
field :bar,
- type: GraphQL::STRING_TYPE,
+ type: GraphQL::Types::String,
null: false,
description: 'The bar.',
see: { 'A list of bars' => 'https://example.com/bars' } do
- argument :barity, ::GraphQL::INT_TYPE, required: false, description: '?'
+ argument :barity, ::GraphQL::Types::Int, required: false, description: '?'
end
field :wibbles,
type: wibble.connection_type,
@@ -220,10 +220,10 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
description 'A thing we used to use, but no longer support'
field :foo,
- type: GraphQL::STRING_TYPE,
+ type: GraphQL::Types::String,
null: false,
description: 'A description.' do
- argument :foo_arg, GraphQL::STRING_TYPE,
+ argument :foo_arg, GraphQL::Types::String,
required: false,
description: 'The argument.',
deprecated: { reason: 'Bad argument', milestone: '101.2' }
@@ -257,19 +257,19 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
description 'A thing we used to use, but no longer support'
field :foo,
- type: GraphQL::STRING_TYPE,
+ type: GraphQL::Types::String,
null: false,
deprecated: { reason: 'This is deprecated', milestone: '1.10' },
description: 'A description.'
field :foo_with_args,
- type: GraphQL::STRING_TYPE,
+ type: GraphQL::Types::String,
null: false,
deprecated: { reason: 'Do not use', milestone: '1.10', replacement: 'X.y' },
description: 'A description.' do
- argument :arg, GraphQL::INT_TYPE, required: false, description: 'Argity'
+ argument :arg, GraphQL::Types::Int, required: false, description: 'Argity'
end
field :bar,
- type: GraphQL::STRING_TYPE,
+ type: GraphQL::Types::String,
null: false,
description: 'A description.',
deprecated: {
@@ -328,7 +328,7 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
)
end
- let(:type) { ::GraphQL::INT_TYPE }
+ let(:type) { ::GraphQL::Types::Int }
let(:section) do
<<~DOC
### `Query.bar`
@@ -453,12 +453,12 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
}
mutation.field :everything,
- type: GraphQL::STRING_TYPE,
+ type: GraphQL::Types::String,
null: true,
description: 'What we made prettier.'
mutation.field :omnis,
- type: GraphQL::STRING_TYPE,
+ type: GraphQL::Types::String,
null: true,
description: 'What we made prettier.',
deprecated: {
@@ -516,7 +516,7 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
let(:type) do
Class.new(::Types::BaseObject) do
graphql_name 'Foo'
- field :wibble, type: ::GraphQL::INT_TYPE, null: true do
+ field :wibble, type: ::GraphQL::Types::Int, null: true do
argument :date_range,
type: ::Types::TimeframeInputType,
required: true,
@@ -547,10 +547,10 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
let(:type) do
user = Class.new(::Types::BaseObject)
user.graphql_name 'User'
- user.field :user_field, ::GraphQL::STRING_TYPE, null: true
+ user.field :user_field, ::GraphQL::Types::String, null: true
group = Class.new(::Types::BaseObject)
group.graphql_name 'Group'
- group.field :group_field, ::GraphQL::STRING_TYPE, null: true
+ group.field :group_field, ::GraphQL::Types::String, null: true
union = Class.new(::Types::BaseUnion)
union.graphql_name 'UserOrGroup'
@@ -561,7 +561,7 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
interface.include(::Types::BaseInterface)
interface.graphql_name 'Flying'
interface.description 'Something that can fly.'
- interface.field :flight_speed, GraphQL::INT_TYPE, null: true, description: 'Speed in mph.'
+ interface.field :flight_speed, GraphQL::Types::Int, null: true, description: 'Speed in mph.'
african_swallow = Class.new(::Types::BaseObject)
african_swallow.graphql_name 'AfricanSwallow'
diff --git a/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb b/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb
index d36bfac4de8..203a453bcdd 100644
--- a/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb
+++ b/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb
@@ -47,6 +47,16 @@ RSpec.describe Packages::Debian::DistributionReleaseFileUploader do
end
end
end
+
+ describe '#filename' do
+ it { expect(subject.filename).to eq('Release')}
+
+ context 'with signed_file' do
+ let(:uploader) { described_class.new(distribution, :signed_file) }
+
+ it { expect(subject.filename).to eq('InRelease')}
+ end
+ end
end
end
end
diff --git a/spec/validators/any_field_validator_spec.rb b/spec/validators/any_field_validator_spec.rb
new file mode 100644
index 00000000000..bede006abf6
--- /dev/null
+++ b/spec/validators/any_field_validator_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AnyFieldValidator do
+ context 'when validation is instantiated correctly' do
+ let(:validated_class) do
+ Class.new(ApplicationRecord) do
+ self.table_name = 'vulnerabilities'
+
+ validates_with AnyFieldValidator, fields: %w(title description)
+ end
+ end
+
+ it 'raises an error if no fields are defined' do
+ validated_object = validated_class.new
+
+ expect(validated_object.valid?).to be_falsey
+ expect(validated_object.errors.messages)
+ .to eq(base: ["At least one field of %{one_of_required_fields} must be present" %
+ { one_of_required_fields: %w(title description) }])
+ end
+
+ it 'validates if only one field is present' do
+ validated_object = validated_class.new(title: 'Vulnerability title')
+
+ expect(validated_object.valid?).to be_truthy
+ end
+ end
+
+ context 'when validation is missing the fields parameter' do
+ let(:invalid_class) do
+ Class.new(ApplicationRecord) do
+ self.table_name = 'vulnerabilities'
+
+ validates_with AnyFieldValidator
+ end
+ end
+
+ it 'raises an error' do
+ expect { invalid_class.new }.to raise_error(RuntimeError)
+ end
+ end
+end
diff --git a/spec/views/admin/dashboard/index.html.haml_spec.rb b/spec/views/admin/dashboard/index.html.haml_spec.rb
index 43a6fcc2adc..9fa95613d1c 100644
--- a/spec/views/admin/dashboard/index.html.haml_spec.rb
+++ b/spec/views/admin/dashboard/index.html.haml_spec.rb
@@ -52,4 +52,12 @@ RSpec.describe 'admin/dashboard/index.html.haml' do
expect(rendered).not_to have_content "Maximum Users"
expect(rendered).not_to have_content "Users over License"
end
+
+ it 'links to the GitLab Changelog' do
+ stub_application_setting(version_check_enabled: true)
+
+ render
+
+ expect(rendered).to have_link(href: 'https://gitlab.com/gitlab-org/gitlab/-/blob/master/CHANGELOG.md')
+ end
end
diff --git a/spec/views/admin/sessions/new.html.haml_spec.rb b/spec/views/admin/sessions/new.html.haml_spec.rb
index 94870f0bdba..97528b6e782 100644
--- a/spec/views/admin/sessions/new.html.haml_spec.rb
+++ b/spec/views/admin/sessions/new.html.haml_spec.rb
@@ -19,9 +19,9 @@ RSpec.describe 'admin/sessions/new.html.haml' do
it 'shows enter password form' do
render
- expect(rendered).to have_selector('[data-qa-selector="sign_in_tab"]')
+ expect(rendered).to have_selector('[data-qa-selector="sign_in_tab"]') # rubocop:disable QA/SelectorUsage
expect(rendered).to have_css('#login-pane.active')
- expect(rendered).to have_selector('[data-qa-selector="password_field"]')
+ expect(rendered).to have_selector('[data-qa-selector="password_field"]') # rubocop:disable QA/SelectorUsage
end
it 'warns authentication not possible if password not set' do
@@ -60,7 +60,7 @@ RSpec.describe 'admin/sessions/new.html.haml' do
it 'is shown when enabled' do
render
- expect(rendered).to have_selector('[data-qa-selector="ldap_tab"]')
+ expect(rendered).to have_selector('[data-qa-selector="ldap_tab"]') # rubocop:disable QA/SelectorUsage
expect(rendered).to have_css('.login-box#ldapmain')
expect(rendered).to have_field('LDAP Username')
expect(rendered).not_to have_content('No authentication methods configured')
@@ -71,7 +71,7 @@ RSpec.describe 'admin/sessions/new.html.haml' do
render
- expect(rendered).not_to have_selector('[data-qa-selector="ldap_tab"]')
+ expect(rendered).not_to have_selector('[data-qa-selector="ldap_tab"]') # rubocop:disable QA/SelectorUsage
expect(rendered).not_to have_field('LDAP Username')
expect(rendered).to have_content('No authentication methods configured')
end
diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb
index d3552bf2e5a..0109d05abe4 100644
--- a/spec/views/devise/sessions/new.html.haml_spec.rb
+++ b/spec/views/devise/sessions/new.html.haml_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'devise/sessions/new' do
render
expect(rendered).to have_selector('.new-session-tabs')
- expect(rendered).to have_selector('[data-qa-selector="ldap_tab"]')
+ expect(rendered).to have_selector('[data-qa-selector="ldap_tab"]') # rubocop:disable QA/SelectorUsage
expect(rendered).to have_field('LDAP Username')
end
@@ -58,7 +58,7 @@ RSpec.describe 'devise/sessions/new' do
render
expect(rendered).to have_content('No authentication methods configured')
- expect(rendered).not_to have_selector('[data-qa-selector="ldap_tab"]')
+ expect(rendered).not_to have_selector('[data-qa-selector="ldap_tab"]') # rubocop:disable QA/SelectorUsage
expect(rendered).not_to have_field('LDAP Username')
end
end
diff --git a/spec/views/groups/edit.html.haml_spec.rb b/spec/views/groups/edit.html.haml_spec.rb
index f40b03fda2a..43e11d31611 100644
--- a/spec/views/groups/edit.html.haml_spec.rb
+++ b/spec/views/groups/edit.html.haml_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'groups/edit.html.haml' do
render
expect(rendered).to have_content("Prevent sharing a project within #{test_group.name} with other groups")
- expect(rendered).to have_css('.js-descr', text: 'help text here')
+ expect(rendered).to have_content('help text here')
expect(rendered).to have_field('group_share_with_group_lock', **checkbox_options)
end
end
diff --git a/spec/views/groups/runners/_sort_dropdown.html.haml_spec.rb b/spec/views/groups/runners/_sort_dropdown.html.haml_spec.rb
new file mode 100644
index 00000000000..4b5027a5a56
--- /dev/null
+++ b/spec/views/groups/runners/_sort_dropdown.html.haml_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'groups/runners/sort_dropdown.html.haml' do
+ describe 'render' do
+ let_it_be(:sort_options_hash) { { by_title: 'Title' } }
+ let_it_be(:sort_title_created_date) { 'Created date' }
+
+ before do
+ allow(view).to receive(:sort).and_return('by_title')
+ end
+
+ describe 'when a sort option is not selected' do
+ it 'renders a default sort option' do
+ render 'groups/runners/sort_dropdown', sort_options_hash: sort_options_hash, sort_title_created_date: sort_title_created_date
+
+ expect(rendered).to have_content 'Created date'
+ end
+ end
+
+ describe 'when a sort option is selected' do
+ it 'renders the selected sort option' do
+ @sort = :by_title
+ render 'groups/runners/sort_dropdown', sort_options_hash: sort_options_hash, sort_title_created_date: sort_title_created_date
+
+ expect(rendered).to have_content 'Title'
+ end
+ end
+ end
+end
diff --git a/spec/views/groups/settings/_transfer.html.haml_spec.rb b/spec/views/groups/settings/_transfer.html.haml_spec.rb
index aeb70251a62..b557c989eae 100644
--- a/spec/views/groups/settings/_transfer.html.haml_spec.rb
+++ b/spec/views/groups/settings/_transfer.html.haml_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe 'groups/settings/_transfer.html.haml' do
render 'groups/settings/transfer', group: group
- expect(rendered).to have_selector '[data-qa-selector="select_group_dropdown"]'
- expect(rendered).not_to have_selector '[data-qa-selector="select_group_dropdown"][disabled]'
+ expect(rendered).to have_selector '[data-qa-selector="select_group_dropdown"]' # rubocop:disable QA/SelectorUsage
+ expect(rendered).not_to have_selector '[data-qa-selector="select_group_dropdown"][disabled]' # rubocop:disable QA/SelectorUsage
expect(rendered).not_to have_selector '[data-testid="group-to-transfer-has-linked-subscription-alert"]'
end
end
diff --git a/spec/views/groups/show.html.haml_spec.rb b/spec/views/groups/show.html.haml_spec.rb
index f40b03fda2a..43e11d31611 100644
--- a/spec/views/groups/show.html.haml_spec.rb
+++ b/spec/views/groups/show.html.haml_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'groups/edit.html.haml' do
render
expect(rendered).to have_content("Prevent sharing a project within #{test_group.name} with other groups")
- expect(rendered).to have_css('.js-descr', text: 'help text here')
+ expect(rendered).to have_content('help text here')
expect(rendered).to have_field('group_share_with_group_lock', **checkbox_options)
end
end
diff --git a/spec/views/layouts/header/_new_dropdown.haml_spec.rb b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
index 319e7b55fc3..47abfff87bb 100644
--- a/spec/views/layouts/header/_new_dropdown.haml_spec.rb
+++ b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe 'layouts/header/_new_dropdown' do
before do
allow(view).to receive(:can?).with(user, :create_projects, group).and_return(true)
allow(view).to receive(:can?).with(user, :admin_group_member, group).and_return(invite_member)
- allow(view).to receive(:can_import_members?).and_return(invite_member)
+ allow(view).to receive(:can_admin_project_member?).and_return(invite_member)
allow(view).to receive(:experiment_enabled?)
end
@@ -142,7 +142,7 @@ RSpec.describe 'layouts/header/_new_dropdown' do
let(:href) { project_project_members_path(project) }
before do
- allow(view).to receive(:can_import_members?).and_return(invite_member)
+ allow(view).to receive(:can_admin_project_member?).and_return(invite_member)
stub_current_user(user)
allow(view).to receive(:experiment_enabled?)
end
diff --git a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
index 7df076d35c4..8c9d1b32671 100644
--- a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
@@ -3,10 +3,17 @@
require 'spec_helper'
RSpec.describe 'layouts/nav/sidebar/_group' do
- let_it_be(:group) { create(:group) }
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:group) do
+ create(:group).tap do |g|
+ g.add_owner(owner)
+ end
+ end
before do
assign(:group, group)
+
+ allow(view).to receive(:current_user).and_return(owner)
end
it_behaves_like 'has nav sidebar'
@@ -27,11 +34,154 @@ RSpec.describe 'layouts/nav/sidebar/_group' do
expect(rendered).to have_link('Group information', href: activity_group_path(group))
end
+ it 'has a link to the group labels path' do
+ render
+
+ expect(rendered).to have_link('Labels', href: group_labels_path(group))
+ end
+
it 'has a link to the members page' do
render
- expect(rendered).to have_selector('.sidebar-top-level-items > li.home a[title="Members"]')
expect(rendered).to have_link('Members', href: group_group_members_path(group))
end
end
+
+ describe 'Issues' do
+ it 'has a default link to the issue list path' do
+ render
+
+ expect(rendered).to have_link('Issues', href: issues_group_path(group))
+ end
+
+ it 'has a link to the issue list page' do
+ render
+
+ expect(rendered).to have_link('List', href: issues_group_path(group))
+ end
+
+ it 'has a link to the boards page' do
+ render
+
+ expect(rendered).to have_link('Board', href: group_boards_path(group))
+ end
+
+ it 'has a link to the milestones page' do
+ render
+
+ expect(rendered).to have_link('Milestones', href: group_milestones_path(group))
+ end
+ end
+
+ describe 'Merge Requests' do
+ it 'has a link to the merge request list path' do
+ render
+
+ expect(rendered).to have_link('Merge requests', href: merge_requests_group_path(group))
+ end
+
+ it 'shows pill with the number of merge requests' do
+ render
+
+ expect(rendered).to have_css('span.badge.badge-pill.merge_counter.js-merge-counter')
+ end
+ end
+
+ describe 'CI/CD' do
+ it 'has a default link to the runners list path' do
+ render
+
+ expect(rendered).to have_link('CI/CD', href: group_runners_path(group))
+ end
+
+ it 'has a link to the runners list page' do
+ render
+
+ expect(rendered).to have_link('Runners', href: group_runners_path(group))
+ end
+ end
+
+ describe 'Kubernetes menu' do
+ it 'has a link to the group cluster list path' do
+ render
+
+ expect(rendered).to have_link('Kubernetes', href: group_clusters_path(group))
+ end
+ end
+
+ describe 'Packages & Registries' do
+ it 'has a link to the package registry page' do
+ stub_config(packages: { enabled: true })
+
+ render
+
+ expect(rendered).to have_link('Package Registry', href: group_packages_path(group))
+ end
+
+ it 'has a link to the container registry page' do
+ stub_container_registry_config(enabled: true)
+
+ render
+
+ expect(rendered).to have_link('Container Registry', href: group_container_registries_path(group))
+ end
+
+ it 'has a link to the dependency proxy page' do
+ stub_config(dependency_proxy: { enabled: true })
+
+ render
+
+ expect(rendered).to have_link('Dependency Proxy', href: group_dependency_proxy_path(group))
+ end
+ end
+
+ describe 'Settings' do
+ it 'default link points to edit group page' do
+ render
+
+ expect(rendered).to have_link('Settings', href: edit_group_path(group))
+ end
+
+ it 'has a link to the General settings page' do
+ render
+
+ expect(rendered).to have_link('General', href: edit_group_path(group))
+ end
+
+ it 'has a link to the Integrations settings page' do
+ render
+
+ expect(rendered).to have_link('Integrations', href: group_settings_integrations_path(group))
+ end
+
+ it 'has a link to the group Projects settings page' do
+ render
+
+ expect(rendered).to have_link('Projects', href: projects_group_path(group))
+ end
+
+ it 'has a link to the Repository settings page' do
+ render
+
+ expect(rendered).to have_link('Repository', href: group_settings_repository_path(group))
+ end
+
+ it 'has a link to the CI/CD settings page' do
+ render
+
+ expect(rendered).to have_link('CI/CD', href: group_settings_ci_cd_path(group))
+ end
+
+ it 'has a link to the Applications settings page' do
+ render
+
+ expect(rendered).to have_link('Applications', href: group_settings_applications_path(group))
+ end
+
+ it 'has a link to the Package & Registries settings page' do
+ render
+
+ expect(rendered).to have_link('Packages & Registries', href: group_settings_packages_and_registries_path(group))
+ end
+ end
end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index f2de43dfd19..3afebfbedab 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -640,9 +640,9 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
expect(rendered).to have_link('Infrastructure Registry', href: project_infrastructure_registry_index_path(project))
end
- context 'when feature flag :infrastructure_registry_page is disabled' do
+ context 'when package registry config is disabled' do
it 'does not show link to package registry page' do
- stub_feature_flags(infrastructure_registry_page: false)
+ stub_config(packages: { enabled: false })
render
diff --git a/spec/views/projects/commits/show.html.haml_spec.rb b/spec/views/projects/commits/show.html.haml_spec.rb
new file mode 100644
index 00000000000..e5e9906a798
--- /dev/null
+++ b/spec/views/projects/commits/show.html.haml_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/commits/show.html.haml' do
+ let(:project) { create(:project, :repository) }
+ let(:commits) { [project.commit] }
+ let(:path) { 'path/to/doc.md' }
+
+ before do
+ assign(:project, project)
+ assign(:id, path)
+ assign(:repository, project.repository)
+ assign(:commits, commits)
+ assign(:hidden_commit_count, 0)
+
+ controller.params[:controller] = 'projects/commits'
+ controller.params[:action] = 'show'
+ controller.params[:namespace_id] = project.namespace.to_param
+ controller.params[:project_id] = project.to_param
+
+ allow(view).to receive(:current_user).and_return(nil)
+ allow(view).to receive(:namespace_project_signatures_path).and_return("/")
+ end
+
+ context 'tree controls' do
+ before do
+ render
+ end
+
+ it 'renders atom feed button with matching path' do
+ expect(rendered).to have_link(href: "#{project_commits_path(project, path)}?format=atom")
+ end
+ end
+end
diff --git a/spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb b/spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb
deleted file mode 100644
index 2fb7b6187eb..00000000000
--- a/spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/deployments/_confirm_rollback_modal' do
- let(:environment) { create(:environment, :with_review_app) }
- let(:deployments) { environment.deployments }
- let(:project) { environment.project }
-
- before do
- assign(:environment, environment)
- assign(:deployments, deployments)
- assign(:project, project)
- end
-
- context 'when re-deploying last deployment' do
- let(:deployment) { deployments.first }
-
- before do
- allow(view).to receive(:deployment).and_return(deployment)
- end
-
- it 'shows "re-deploy"' do
- render
-
- expect(rendered).to have_selector('h4', text: "Re-deploy environment #{environment.name}?")
- expect(rendered).to have_selector('p', text: "This action will relaunch the job for commit #{deployment.short_sha}, putting the environment in a previous version. Are you sure you want to continue?")
- expect(rendered).to have_selector('a.btn-danger', text: 'Re-deploy')
- end
-
- it 'links to re-deploying the environment' do
- expected_link = retry_project_job_path(environment.project, deployment.deployable)
-
- render
-
- expect(rendered).to have_selector("a[href='#{expected_link}']", text: 'Re-deploy')
- end
- end
-
- context 'when rolling back to previous deployment' do
- let(:deployment) { create(:deployment, environment: environment) }
-
- before do
- allow(view).to receive(:deployment).and_return(deployment)
- end
-
- it 'shows "rollback"' do
- render
-
- expect(rendered).to have_selector('h4', text: "Rollback environment #{environment.name}?")
- expect(rendered).to have_selector('p', text: "This action will run the job defined by #{environment.name} for commit #{deployment.short_sha}, putting the environment in a previous version. You can revert it by re-deploying the latest version of your application. Are you sure you want to continue?")
- expect(rendered).to have_selector('a.btn-danger', text: 'Rollback')
- end
-
- it 'links to re-deploying the environment' do
- expected_link = retry_project_job_path(environment.project, deployment.deployable)
-
- render
-
- expect(rendered).to have_selector("a[href='#{expected_link}']", text: 'Rollback')
- end
- end
-end
diff --git a/spec/views/projects/empty.html.haml_spec.rb b/spec/views/projects/empty.html.haml_spec.rb
index 0fb0ae5ff29..70da4fc9e27 100644
--- a/spec/views/projects/empty.html.haml_spec.rb
+++ b/spec/views/projects/empty.html.haml_spec.rb
@@ -6,7 +6,10 @@ RSpec.describe 'projects/empty' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { ProjectPresenter.new(create(:project, :empty_repo), current_user: user) }
+ let(:can_admin_project_member) { true }
+
before do
+ allow(view).to receive(:can_admin_project_member?).and_return(can_admin_project_member)
allow(view).to receive(:experiment_enabled?).and_return(true)
allow(view).to receive(:current_user).and_return(user)
assign(:project, project)
@@ -47,12 +50,6 @@ RSpec.describe 'projects/empty' do
end
context 'with invite button on empty projects' do
- let(:can_import_members) { true }
-
- before do
- allow(view).to receive(:can_import_members?).and_return(can_import_members)
- end
-
it 'shows invite members info', :aggregate_failures do
render
@@ -68,7 +65,7 @@ RSpec.describe 'projects/empty' do
end
context 'when user does not have permissions to invite members' do
- let(:can_import_members) { false }
+ let(:can_admin_project_member) { false }
it 'does not show invite member info', :aggregate_failures do
render
diff --git a/spec/views/projects/merge_requests/show.html.haml_spec.rb b/spec/views/projects/merge_requests/show.html.haml_spec.rb
index 40d11342ec4..6b6bc1f0b14 100644
--- a/spec/views/projects/merge_requests/show.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/show.html.haml_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'projects/merge_requests/show.html.haml', :aggregate_failures do
+ using RSpec::Parameterized::TableSyntax
+
include_context 'merge request show action'
before do
@@ -43,4 +45,32 @@ RSpec.describe 'projects/merge_requests/show.html.haml', :aggregate_failures do
end
end
end
+
+ describe 'gitpod modal' do
+ let(:gitpod_modal_selector) { '#modal-enable-gitpod' }
+ let(:user) { create(:user) }
+ let(:user_gitpod_enabled) { create(:user).tap { |x| x.update!(gitpod_enabled: true) } }
+
+ where(:site_enabled, :current_user, :should_show) do
+ false | ref(:user) | false
+ true | ref(:user) | true
+ true | nil | true
+ true | ref(:user_gitpod_enabled) | false
+ end
+
+ with_them do
+ it 'handles rendering gitpod user enable modal' do
+ allow(Gitlab::CurrentSettings).to receive(:gitpod_enabled).and_return(site_enabled)
+ allow(view).to receive(:current_user).and_return(current_user)
+
+ render
+
+ if should_show
+ expect(rendered).to have_css(gitpod_modal_selector)
+ else
+ expect(rendered).to have_no_css(gitpod_modal_selector)
+ end
+ end
+ end
+ end
end
diff --git a/spec/views/projects/tree/show.html.haml_spec.rb b/spec/views/projects/tree/show.html.haml_spec.rb
index bdf9b08d8f5..62a52bcf83f 100644
--- a/spec/views/projects/tree/show.html.haml_spec.rb
+++ b/spec/views/projects/tree/show.html.haml_spec.rb
@@ -15,7 +15,6 @@ RSpec.describe 'projects/tree/show' do
before do
assign(:project, project)
assign(:repository, repository)
- assign(:lfs_blob_ids, [])
allow(view).to receive(:can?).and_return(true)
allow(view).to receive(:can_collaborate_with_project?).and_return(true)
diff --git a/spec/views/registrations/welcome/show.html.haml_spec.rb b/spec/views/registrations/welcome/show.html.haml_spec.rb
index ecdef7918de..d9c5d348e15 100644
--- a/spec/views/registrations/welcome/show.html.haml_spec.rb
+++ b/spec/views/registrations/welcome/show.html.haml_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'registrations/welcome/show' do
- let(:is_gitlab_com) { false }
-
let_it_be(:user) { create(:user) }
before do
@@ -13,7 +11,6 @@ RSpec.describe 'registrations/welcome/show' do
allow(view).to receive(:in_trial_flow?).and_return(false)
allow(view).to receive(:user_has_memberships?).and_return(false)
allow(view).to receive(:in_oauth_flow?).and_return(false)
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
render
end
@@ -22,24 +19,5 @@ RSpec.describe 'registrations/welcome/show' do
it { is_expected.not_to have_selector('label[for="user_setup_for_company"]') }
it { is_expected.to have_button('Get started!') }
- it { is_expected.to have_selector('input[name="user[email_opted_in]"]') }
-
- describe 'email opt in' do
- context 'when on gitlab.com' do
- let(:is_gitlab_com) { true }
-
- it 'hides the email-opt in by default' do
- expect(subject).to have_css('.js-email-opt-in.hidden')
- end
- end
-
- context 'when not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- it 'hides the email-opt in by default' do
- expect(subject).not_to have_css('.js-email-opt-in.hidden')
- expect(subject).to have_css('.js-email-opt-in')
- end
- end
- end
+ it { is_expected.not_to have_selector('input[name="user[email_opted_in]"]') }
end
diff --git a/spec/views/search/show.html.haml_spec.rb b/spec/views/search/show.html.haml_spec.rb
index eb763d424d3..a336ec91ff2 100644
--- a/spec/views/search/show.html.haml_spec.rb
+++ b/spec/views/search/show.html.haml_spec.rb
@@ -48,21 +48,50 @@ RSpec.describe 'search/show' do
assign(:group, group)
end
- it 'renders meta tags for a group' do
- render
+ context 'search with full count' do
+ before do
+ assign(:without_count, false)
+ end
+
+ it 'renders meta tags for a group' do
+ render
+
+ expect(view.page_description).to match(/\d+ issues for term '#{search_term}'/)
+ expect(view.page_card_attributes).to eq("Namespace" => group.full_path)
+ end
- expect(view.page_description).to match(/\d+ issues for term '#{search_term}'/)
- expect(view.page_card_attributes).to eq("Namespace" => group.full_path)
+ it 'renders meta tags for both group and project' do
+ project = build(:project, group: group)
+ assign(:project, project)
+
+ render
+
+ expect(view.page_description).to match(/\d+ issues for term '#{search_term}'/)
+ expect(view.page_card_attributes).to eq("Namespace" => group.full_path, "Project" => project.full_path)
+ end
end
- it 'renders meta tags for both group and project' do
- project = build(:project, group: group)
- assign(:project, project)
+ context 'search without full count' do
+ before do
+ assign(:without_count, true)
+ end
+
+ it 'renders meta tags for a group' do
+ render
+
+ expect(view.page_description).to match(/issues results for term '#{search_term}'/)
+ expect(view.page_card_attributes).to eq("Namespace" => group.full_path)
+ end
+
+ it 'renders meta tags for both group and project' do
+ project = build(:project, group: group)
+ assign(:project, project)
- render
+ render
- expect(view.page_description).to match(/\d+ issues for term '#{search_term}'/)
- expect(view.page_card_attributes).to eq("Namespace" => group.full_path, "Project" => project.full_path)
+ expect(view.page_description).to match(/issues results for term '#{search_term}'/)
+ expect(view.page_card_attributes).to eq("Namespace" => group.full_path, "Project" => project.full_path)
+ end
end
end
end
diff --git a/spec/views/shared/access_tokens/_table.html.haml_spec.rb b/spec/views/shared/access_tokens/_table.html.haml_spec.rb
new file mode 100644
index 00000000000..489675b5683
--- /dev/null
+++ b/spec/views/shared/access_tokens/_table.html.haml_spec.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'shared/access_tokens/_table.html.haml' do
+ let(:type) { 'token' }
+ let(:type_plural) { 'tokens' }
+ let(:empty_message) { nil }
+ let(:token_expiry_enforced?) { false }
+ let(:impersonation) { false }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:tokens) { [create(:personal_access_token, user: user)] }
+ let_it_be(:project) { false }
+
+ before do
+ stub_licensed_features(enforce_personal_access_token_expiration: true)
+ allow(Gitlab::CurrentSettings).to receive(:enforce_pat_expiration?).and_return(false)
+
+ allow(view).to receive(:personal_access_token_expiration_enforced?).and_return(token_expiry_enforced?)
+ allow(view).to receive(:show_profile_token_expiry_notification?).and_return(true)
+ allow(view).to receive(:distance_of_time_in_words_to_now).and_return('4 days')
+
+ if project
+ project.add_maintainer(user)
+ end
+
+ # Forcibly removing scopes from one token as it's not possible to do with the current modal on creation
+ # But the check exists in the template (it may be there for legacy reasons), so we should test the outcome
+ if tokens.size > 1
+ tokens[1].scopes = []
+ end
+
+ locals = {
+ type: type,
+ type_plural: type_plural,
+ active_tokens: tokens,
+ project: project,
+ impersonation: impersonation,
+ revoke_route_helper: ->(token) { 'path/' }
+ }
+
+ if empty_message
+ locals[:no_active_tokens_message] = empty_message
+ end
+
+ render partial: 'shared/access_tokens/table', locals: locals
+ end
+
+ context 'if personal' do
+ it 'does not show non-personal content', :aggregate_failures do
+ expect(rendered).not_to have_content 'To see all the user\'s personal access tokens you must impersonate them first.'
+ expect(rendered).not_to have_selector 'th', text: 'Role'
+ end
+
+ context 'if token expiration is enforced' do
+ let(:token_expiry_enforced?) { true }
+
+ it 'does not show the subtext' do
+ expect(rendered).not_to have_content 'Personal access tokens are not revoked upon expiration.'
+ end
+ end
+
+ context 'if token expiration is not enforced' do
+ let(:token_expiry_enforced?) { false }
+
+ it 'does show the subtext' do
+ expect(rendered).to have_content 'Personal access tokens are not revoked upon expiration.'
+ end
+ end
+ end
+
+ context 'if impersonation' do
+ let(:impersonation) { true }
+
+ it 'shows the impersonation content', :aggregate_failures do
+ expect(rendered).to have_content 'To see all the user\'s personal access tokens you must impersonate them first.'
+
+ expect(rendered).not_to have_content 'Personal access tokens are not revoked upon expiration.'
+ expect(rendered).not_to have_selector 'th', text: 'Role'
+ end
+ end
+
+ context 'if project' do
+ let_it_be(:project) { create(:project) }
+
+ it 'shows the project content', :aggregate_failures do
+ expect(rendered).to have_selector 'th', text: 'Role'
+ expect(rendered).to have_selector 'td', text: 'Maintainer'
+
+ expect(rendered).not_to have_content 'Personal access tokens are not revoked upon expiration.'
+ expect(rendered).not_to have_content 'To see all the user\'s personal access tokens you must impersonate them first.'
+ end
+ end
+
+ context 'without tokens' do
+ let_it_be(:tokens) { [] }
+
+ it 'has the correct content', :aggregate_failures do
+ expect(rendered).to have_content 'Active tokens (0)'
+ expect(rendered).to have_content 'This user has no active tokens.'
+ end
+
+ context 'with a custom empty text' do
+ let(:empty_message) { 'Custom empty message' }
+
+ it 'shows the custom empty text' do
+ expect(rendered).to have_content empty_message
+ end
+ end
+ end
+
+ context 'with tokens' do
+ let_it_be(:tokens) do
+ [
+ create(:personal_access_token, user: user, name: 'Access token', last_used_at: 1.day.ago, expires_at: nil),
+ create(:personal_access_token, user: user, expires_at: 5.days.ago),
+ create(:personal_access_token, user: user, expires_at: Time.now),
+ create(:personal_access_token, user: user, expires_at: 5.days.from_now, scopes: [:read_api, :read_user])
+ ]
+ end
+
+ it 'has the correct content', :aggregate_failures do
+ # Heading content
+ expect(rendered).to have_content 'Active tokens (4)'
+
+ # Table headers
+ expect(rendered).to have_selector 'th', text: 'Token name'
+ expect(rendered).to have_selector 'th', text: 'Scopes'
+ expect(rendered).to have_selector 'th', text: 'Created'
+ expect(rendered).to have_selector 'th', text: 'Last Used'
+ expect(rendered).to have_selector 'th', text: 'Expires'
+
+ # Table contents
+ expect(rendered).to have_content 'Access token'
+ expect(rendered).to have_content 'read_api, read_user'
+ expect(rendered).to have_content 'no scopes selected'
+ expect(rendered).to have_content Time.now.to_date.to_s(:medium)
+ expect(rendered).to have_content l(1.day.ago, format: "%b %d, %Y")
+
+ # Expiry
+ expect(rendered).to have_content 'Expired', count: 2
+ expect(rendered).to have_content 'In 4 days'
+
+ # Revoke buttons
+ expect(rendered).to have_link 'Revoke', href: 'path/', class: 'btn-danger-secondary', count: 1
+ expect(rendered).to have_link 'Revoke', href: 'path/', count: 4
+ end
+
+ context 'without the last used time' do
+ let_it_be(:tokens) { [create(:personal_access_token, user: user, expires_at: 5.days.ago)] }
+
+ it 'shows the last used empty text' do
+ expect(rendered).to have_content 'Never'
+ end
+ end
+
+ context 'without expired at' do
+ let_it_be(:tokens) { [create(:personal_access_token, user: user, expires_at: nil, last_used_at: 1.day.ago)] }
+
+ it 'shows the expired at empty text' do
+ expect(rendered).to have_content 'Never'
+ end
+ end
+ end
+end
diff --git a/spec/views/shared/deploy_tokens/_form.html.haml_spec.rb b/spec/views/shared/deploy_tokens/_form.html.haml_spec.rb
index 3508ba8cca9..5ac42952f78 100644
--- a/spec/views/shared/deploy_tokens/_form.html.haml_spec.rb
+++ b/spec/views/shared/deploy_tokens/_form.html.haml_spec.rb
@@ -19,9 +19,9 @@ RSpec.describe 'shared/deploy_tokens/_form.html.haml' do
render 'shared/deploy_tokens/form', token: token, group_or_project: subject
if shows_package_registry_permissions
- expect(rendered).to have_content('Allows read access to the package registry')
+ expect(rendered).to have_content('Allows read-only access to the package registry.')
else
- expect(rendered).not_to have_content('Allows read access to the package registry')
+ expect(rendered).not_to have_content('Allows read-only access to the package registry.')
end
end
end
diff --git a/spec/workers/analytics/usage_trends/counter_job_worker_spec.rb b/spec/workers/analytics/usage_trends/counter_job_worker_spec.rb
index 9e4c82ee981..dd180229d12 100644
--- a/spec/workers/analytics/usage_trends/counter_job_worker_spec.rb
+++ b/spec/workers/analytics/usage_trends/counter_job_worker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Analytics::UsageTrends::CounterJobWorker do
let(:job_args) { [users_measurement_identifier, user_1.id, user_2.id, recorded_at] }
before do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(::Analytics::UsageTrends::Measurement.connection).to receive(:transaction_open?).and_return(false)
end
include_examples 'an idempotent worker' do
diff --git a/spec/workers/authorized_project_update/project_recalculate_worker_spec.rb b/spec/workers/authorized_project_update/project_recalculate_worker_spec.rb
index 403793a15e2..a9a15565580 100644
--- a/spec/workers/authorized_project_update/project_recalculate_worker_spec.rb
+++ b/spec/workers/authorized_project_update/project_recalculate_worker_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe AuthorizedProjectUpdate::ProjectRecalculateWorker do
end
context 'exclusive lease' do
- let(:lock_key) { "#{described_class.name.underscore}/#{project.root_namespace.id}" }
+ let(:lock_key) { "#{described_class.name.underscore}/projects/#{project.id}" }
let(:timeout) { 10.seconds }
context 'when exclusive lease has not been taken' do
diff --git a/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
index c27629c3a15..027ce3b7f89 100644
--- a/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
+++ b/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
@@ -44,11 +44,7 @@ RSpec.describe AuthorizedProjectUpdate::UserRefreshFromReplicaWorker do
end
end
- context 'with load balancing enabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- end
-
+ context 'with load balancing enabled', :db_load_balancing do
it 'reads from the replica database' do
expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 6b7162ee886..4e34d2348d6 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -15,7 +15,6 @@ RSpec.describe BuildFinishedWorker do
end
it 'calculates coverage and calls hooks', :aggregate_failures do
- expect(build).to receive(:parse_trace_sections!).ordered
expect(build).to receive(:update_coverage).ordered
expect_next_instance_of(Ci::BuildReportResultService) do |build_report_result_service|
diff --git a/spec/workers/ci/build_finished_worker_spec.rb b/spec/workers/ci/build_finished_worker_spec.rb
index 374ecd8619f..9096b0d2ba9 100644
--- a/spec/workers/ci/build_finished_worker_spec.rb
+++ b/spec/workers/ci/build_finished_worker_spec.rb
@@ -15,7 +15,6 @@ RSpec.describe Ci::BuildFinishedWorker do
end
it 'calculates coverage and calls hooks', :aggregate_failures do
- expect(build).to receive(:parse_trace_sections!).ordered
expect(build).to receive(:update_coverage).ordered
expect_next_instance_of(Ci::BuildReportResultService) do |build_report_result_service|
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index 4c96daea7b3..c1ac5ffebe8 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -21,6 +21,12 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
end.new
end
+ let_it_be(:project) { create(:project, :import_started) }
+
+ let(:importer_class) { double(:importer_class, name: 'klass_name') }
+ let(:importer_instance) { double(:importer_instance) }
+ let(:client) { double(:client) }
+
before do
stub_const('MockRepresantation', Class.new do
include Gitlab::GithubImport::Representation::ToHash
@@ -38,12 +44,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
end)
end
- describe '#import', :clean_gitlab_redis_shared_state do
- let(:importer_class) { double(:importer_class, name: 'klass_name') }
- let(:importer_instance) { double(:importer_instance) }
- let(:project) { double(:project, full_path: 'foo/bar', id: 1) }
- let(:client) { double(:client) }
-
+ describe '#import', :clean_gitlab_redis_cache do
before do
expect(worker)
.to receive(:importer_class)
@@ -60,26 +61,23 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(importer_instance)
.to receive(:execute)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- github_id: 1,
- message: 'starting importer',
- import_source: :github,
- project_id: 1,
- importer: 'klass_name'
- )
- expect(logger)
- .to receive(:info)
- .with(
- github_id: 1,
- message: 'importer finished',
- import_source: :github,
- project_id: 1,
- importer: 'klass_name'
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ github_id: 1,
+ message: 'starting importer',
+ project_id: project.id,
+ importer: 'klass_name'
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ github_id: 1,
+ message: 'importer finished',
+ project_id: project.id,
+ importer: 'klass_name'
+ )
worker.import(project, client, { 'number' => 10, 'github_id' => 1 })
@@ -100,74 +98,45 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
.to receive(:execute)
.and_raise(exception)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- github_id: 1,
- message: 'starting importer',
- import_source: :github,
- project_id: project.id,
- importer: 'klass_name'
- )
- expect(logger)
- .to receive(:error)
- .with(
- github_id: 1,
- message: 'importer failed',
- import_source: :github,
- project_id: project.id,
- importer: 'klass_name',
- 'error.message': 'some error',
- 'github.data': {
- 'github_id' => 1,
- 'number' => 10
- }
- )
- end
-
- expect(Gitlab::ErrorTracking)
- .to receive(:track_and_raise_exception)
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
.with(
- exception,
- import_source: :github,
github_id: 1,
- project_id: 1,
+ message: 'starting importer',
+ project_id: project.id,
importer: 'klass_name'
- ).and_call_original
+ )
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception,
+ error_source: 'klass_name'
+ )
+ .and_call_original
+
+ worker.import(project, client, { 'number' => 10, 'github_id' => 1 })
+
+ expect(project.import_state.reload.status).to eq('started')
- expect { worker.import(project, client, { 'number' => 10, 'github_id' => 1 }) }
- .to raise_error(exception)
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
end
it 'logs error when representation does not have a github_id' do
expect(importer_class).not_to receive(:new)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:error)
- .with(
- github_id: nil,
- message: 'importer failed',
- import_source: :github,
- project_id: project.id,
- importer: 'klass_name',
- 'error.message': 'key not found: :github_id',
- 'github.data': {
- 'number' => 10
- }
- )
- end
-
- expect(Gitlab::ErrorTracking)
- .to receive(:track_and_raise_exception)
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
.with(
- an_instance_of(KeyError),
- import_source: :github,
- github_id: nil,
- project_id: 1,
- importer: 'klass_name'
- ).and_call_original
+ project_id: project.id,
+ exception: a_kind_of(KeyError),
+ error_source: 'klass_name',
+ fail_import: true
+ )
+ .and_call_original
expect { worker.import(project, client, { 'number' => 10 }) }
.to raise_error(KeyError, 'key not found: :github_id')
diff --git a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
index 651ea77a71c..aeb86f5aa8c 100644
--- a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::StageMethods do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project, :import_started, import_url: 'https://t0ken@github.com/repo/repo.git') }
+
let(:worker) do
Class.new do
def self.name
@@ -15,8 +16,6 @@ RSpec.describe Gitlab::GithubImport::StageMethods do
end
describe '#perform' do
- let(:project) { create(:project, import_url: 'https://t0ken@github.com/repo/repo.git') }
-
it 'returns if no project could be found' do
expect(worker).not_to receive(:try_import)
@@ -36,71 +35,119 @@ RSpec.describe Gitlab::GithubImport::StageMethods do
an_instance_of(Project)
)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'starting stage',
+ project_id: project.id,
+ import_stage: 'DummyStage'
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'stage finished',
+ project_id: project.id,
+ import_stage: 'DummyStage'
+ )
+
+ worker.perform(project.id)
+ end
+
+ context 'when abort_on_failure is false' do
+ it 'logs error when import fails' do
+ exception = StandardError.new('some error')
+
+ allow(worker)
+ .to receive(:find_project)
+ .with(project.id)
+ .and_return(project)
+
+ expect(worker)
+ .to receive(:try_import)
+ .and_raise(exception)
+
+ expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
message: 'starting stage',
- import_source: :github,
project_id: project.id,
import_stage: 'DummyStage'
)
- expect(logger)
- .to receive(:info)
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
.with(
- message: 'stage finished',
- import_source: :github,
project_id: project.id,
- import_stage: 'DummyStage'
- )
- end
+ exception: exception,
+ error_source: 'DummyStage',
+ fail_import: false
+ ).and_call_original
- worker.perform(project.id)
+ expect { worker.perform(project.id) }
+ .to raise_error(exception)
+
+ expect(project.import_state.reload.status).to eq('started')
+
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
+ end
end
- it 'logs error when import fails' do
- exception = StandardError.new('some error')
+ context 'when abort_on_failure is true' do
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'DummyStage'
+ end
- allow(worker)
- .to receive(:find_project)
- .with(project.id)
- .and_return(project)
+ def abort_on_failure
+ true
+ end
- expect(worker)
- .to receive(:try_import)
- .and_raise(exception)
+ include(Gitlab::GithubImport::StageMethods)
+ end.new
+ end
+
+ it 'logs, captures and re-raises the exception and also marks the import as failed' do
+ exception = StandardError.new('some error')
+
+ allow(worker)
+ .to receive(:find_project)
+ .with(project.id)
+ .and_return(project)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
+ expect(worker)
+ .to receive(:try_import)
+ .and_raise(exception)
+
+ expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
message: 'starting stage',
- import_source: :github,
project_id: project.id,
import_stage: 'DummyStage'
)
- expect(logger)
- .to receive(:error)
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
.with(
- message: 'stage failed',
- import_source: :github,
project_id: project.id,
- import_stage: 'DummyStage',
- 'error.message': 'some error'
- )
- end
+ exception: exception,
+ error_source: 'DummyStage',
+ fail_import: true
+ ).and_call_original
- expect(Gitlab::ErrorTracking)
- .to receive(:track_and_raise_exception)
- .with(
- exception,
- import_source: :github,
- project_id: project.id,
- import_stage: 'DummyStage'
- )
- .and_call_original
+ expect { worker.perform(project.id) }.to raise_error(exception)
+
+ expect(project.import_state.reload.status).to eq('failed')
+ expect(project.import_state.last_error).to eq('some error')
- expect { worker.perform(project.id) }.to raise_error(exception)
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
+ end
end
end
@@ -132,16 +179,14 @@ RSpec.describe Gitlab::GithubImport::StageMethods do
end
describe '#find_project' do
- let(:import_state) { create(:import_state, project: project) }
-
it 'returns a Project for an existing ID' do
- import_state.update_column(:status, 'started')
+ project.import_state.update_column(:status, 'started')
expect(worker.find_project(project.id)).to eq(project)
end
it 'returns nil for a project that failed importing' do
- import_state.update_column(:status, 'failed')
+ project.import_state.update_column(:status, 'failed')
expect(worker.find_project(project.id)).to be_nil
end
diff --git a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
index 506124216af..fdba67638c1 100644
--- a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(ContainerExpirationPolicies::CleanupService)
.to receive(:new).with(repository).and_return(double(execute: service_response))
expect_log_extra_metadata(service_response: service_response)
+ expect_log_info(project_id: project.id, container_repository_id: repository.id)
subject
end
@@ -35,6 +36,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(ContainerExpirationPolicies::CleanupService)
.to receive(:new).with(repository).and_return(double(execute: service_response))
expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished)
+ expect_log_info(project_id: project.id, container_repository_id: repository.id)
subject
end
@@ -45,6 +47,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(ContainerExpirationPolicies::CleanupService)
.to receive(:new).with(repository).and_return(double(execute: service_response))
expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished, truncated: true)
+ expect_log_info(project_id: project.id, container_repository_id: repository.id)
subject
end
@@ -65,6 +68,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(ContainerExpirationPolicies::CleanupService)
.to receive(:new).with(repository).and_return(double(execute: service_response))
expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished, truncated: truncated)
+ expect_log_info(project_id: project.id, container_repository_id: repository.id)
subject
end
@@ -78,6 +82,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(ContainerExpirationPolicies::CleanupService)
.to receive(:new).with(repository).and_return(double(execute: service_response))
expect_log_extra_metadata(service_response: service_response, cleanup_status: :error)
+ expect_log_info(project_id: project.id, container_repository_id: repository.id)
subject
end
@@ -361,6 +366,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(ContainerExpirationPolicies::CleanupService)
.to receive(:new).with(repository).and_return(double(execute: service_response))
expect_log_extra_metadata(service_response: service_response)
+ expect_log_info(project_id: project.id, container_repository_id: repository.id)
subject
end
@@ -396,6 +402,11 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_error_message, service_response.message)
end
end
+
+ def expect_log_info(structure)
+ expect(worker.logger)
+ .to receive(:info).with(worker.structured_payload(structure))
+ end
end
describe '#remaining_work_count' do
@@ -446,6 +457,12 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
it { is_expected.to eq(0) }
+
+ it 'does not log a selected container' do
+ expect(worker).not_to receive(:log_info)
+
+ subject
+ end
end
end
diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb
index 69ddbe5c0f4..9f370b10f6a 100644
--- a/spec/workers/container_expiration_policy_worker_spec.rb
+++ b/spec/workers/container_expiration_policy_worker_spec.rb
@@ -156,11 +156,7 @@ RSpec.describe ContainerExpirationPolicyWorker do
subject
end
- context 'with load balancing enabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- end
-
+ context 'with load balancing enabled', :db_load_balancing do
it 'reads the counts from the replica' do
expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
diff --git a/spec/workers/database/drop_detached_partitions_worker_spec.rb b/spec/workers/database/drop_detached_partitions_worker_spec.rb
new file mode 100644
index 00000000000..42c3fa3c188
--- /dev/null
+++ b/spec/workers/database/drop_detached_partitions_worker_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Database::DropDetachedPartitionsWorker do
+ describe '#perform' do
+ subject { described_class.new.perform }
+
+ let(:dropper) { instance_double('DropDetachedPartitions', perform: nil) }
+ let(:monitoring) { instance_double('PartitionMonitoring', report_metrics: nil) }
+
+ before do
+ allow(Gitlab::Database::Partitioning::DetachedPartitionDropper).to receive(:new).and_return(dropper)
+ allow(Gitlab::Database::Partitioning::PartitionMonitoring).to receive(:new).and_return(monitoring)
+ end
+
+ it 'delegates to DropPartitionsPendingDrop' do
+ expect(dropper).to receive(:perform)
+
+ subject
+ end
+
+ it 'reports partition metrics' do
+ expect(monitoring).to receive(:report_metrics)
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/deployments/hooks_worker_spec.rb b/spec/workers/deployments/hooks_worker_spec.rb
index f1fe7b0fc5d..5d8edf85dd9 100644
--- a/spec/workers/deployments/hooks_worker_spec.rb
+++ b/spec/workers/deployments/hooks_worker_spec.rb
@@ -49,5 +49,10 @@ RSpec.describe Deployments::HooksWorker do
worker.perform(deployment_id: deployment.id, status_changed_at: status_changed_at)
end
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ feature_flag: :load_balancing_for_deployments_hooks_worker,
+ data_consistency: :delayed
end
end
diff --git a/spec/workers/environments/auto_delete_cron_worker_spec.rb b/spec/workers/environments/auto_delete_cron_worker_spec.rb
new file mode 100644
index 00000000000..b18f3da5d10
--- /dev/null
+++ b/spec/workers/environments/auto_delete_cron_worker_spec.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Environments::AutoDeleteCronWorker do
+ include CreateEnvironmentsHelpers
+
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ subject { worker.perform }
+
+ let_it_be(:project) { create(:project, :repository) }
+
+ let!(:environment) { create(:environment, :auto_deletable, project: project) }
+
+ it 'deletes the environment' do
+ expect { subject }.to change { Environment.count }.by(-1)
+ end
+
+ context 'when environment is not stopped' do
+ let!(:environment) { create(:environment, :available, auto_delete_at: 1.day.ago, project: project) }
+
+ it 'does not delete the environment' do
+ expect { subject }.not_to change { Environment.count }
+ end
+ end
+
+ context 'when auto_delete_at is null' do
+ let!(:environment) { create(:environment, :stopped, auto_delete_at: nil, project: project) }
+
+ it 'does not delete the environment' do
+ expect { subject }.not_to change { Environment.count }
+ end
+ end
+
+ context 'with multiple deletable environments' do
+ let!(:other_environment) { create(:environment, :auto_deletable, project: project) }
+
+ it 'deletes all deletable environments' do
+ expect { subject }.to change { Environment.count }.by(-2)
+ end
+
+ context 'when loop reached loop limit' do
+ before do
+ stub_const("#{described_class}::LOOP_LIMIT", 1)
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'deletes only one deletable environment' do
+ expect { subject }.to change { Environment.count }.by(-1)
+ end
+ end
+
+ context 'when batch size is less than the number of environments' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'deletes all deletable environments' do
+ expect { subject }.to change { Environment.count }.by(-2)
+ end
+ end
+ end
+
+ context 'with multiple deployments' do
+ it 'deletes the deployment records and refs' do
+ deployment_1 = create(:deployment, environment: environment, project: project)
+ deployment_2 = create(:deployment, environment: environment, project: project)
+ deployment_1.create_ref
+ deployment_2.create_ref
+
+ expect(project.repository.commit(deployment_1.ref_path)).to be_present
+ expect(project.repository.commit(deployment_2.ref_path)).to be_present
+
+ expect { subject }.to change { Deployment.count }.by(-2)
+
+ expect(project.repository.commit(deployment_1.ref_path)).not_to be_present
+ expect(project.repository.commit(deployment_2.ref_path)).not_to be_present
+ end
+ end
+
+ context 'when loop reached timeout' do
+ before do
+ stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
+ stub_const("#{described_class}::LOOP_LIMIT", 100_000)
+ allow_next_instance_of(described_class) do |worker|
+ allow(worker).to receive(:destroy_in_batch) { true }
+ end
+ end
+
+ it 'does not delete the environment' do
+ expect { subject }.not_to change { Environment.count }
+ end
+ end
+
+ context 'with idempotent flag' do
+ include_examples 'an idempotent worker' do
+ it 'deletes the environment' do
+ expect { subject }.to change { Environment.count }.by(-1)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index c75b9b43ef4..ea1f0153f83 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -8,17 +8,17 @@ RSpec.describe 'Every Sidekiq worker' do
end
it 'does not use the default queue' do
- expect(workers_without_defaults.map(&:queue)).not_to include('default')
+ expect(workers_without_defaults.map(&:generated_queue_name)).not_to include('default')
end
it 'uses the cronjob queue when the worker runs as a cronjob' do
- expect(Gitlab::SidekiqConfig.cron_workers.map(&:queue)).to all(start_with('cronjob:'))
+ expect(Gitlab::SidekiqConfig.cron_workers.map(&:generated_queue_name)).to all(start_with('cronjob:'))
end
it 'has its queue in Gitlab::SidekiqConfig::QUEUE_CONFIG_PATHS', :aggregate_failures do
file_worker_queues = Gitlab::SidekiqConfig.worker_queues.to_set
- worker_queues = Gitlab::SidekiqConfig.workers.map(&:queue).to_set
+ worker_queues = Gitlab::SidekiqConfig.workers.map(&:generated_queue_name).to_set
worker_queues << ActionMailer::MailDeliveryJob.new.queue_name
worker_queues << 'default'
@@ -33,7 +33,7 @@ RSpec.describe 'Every Sidekiq worker' do
config_queues = Gitlab::SidekiqConfig.config_queues.to_set
Gitlab::SidekiqConfig.workers.each do |worker|
- queue = worker.queue
+ queue = worker.generated_queue_name
queue_namespace = queue.split(':').first
expect(config_queues).to include(queue).or(include(queue_namespace))
@@ -430,7 +430,6 @@ RSpec.describe 'Every Sidekiq worker' do
'StageUpdateWorker' => 3,
'StatusPage::PublishWorker' => 5,
'StoreSecurityReportsWorker' => 3,
- 'StoreSecurityScansWorker' => 3,
'SyncSeatLinkRequestWorker' => 20,
'SyncSeatLinkWorker' => 12,
'SystemHookPushWorker' => 3,
diff --git a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
index 8dea24dc74f..132fe1dc618 100644
--- a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
@@ -26,21 +26,18 @@ RSpec.describe Gitlab::GithubImport::Stage::FinishImportWorker do
.to receive(:increment)
.and_call_original
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- message: 'GitHub project import finished',
- import_stage: 'Gitlab::GithubImport::Stage::FinishImportWorker',
- import_source: :github,
- object_counts: {
- 'fetched' => {},
- 'imported' => {}
- },
- project_id: project.id,
- duration_s: a_kind_of(Numeric)
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'GitHub project import finished',
+ import_stage: 'Gitlab::GithubImport::Stage::FinishImportWorker',
+ object_counts: {
+ 'fetched' => {},
+ 'imported' => {}
+ },
+ project_id: project.id,
+ duration_s: a_kind_of(Numeric)
+ )
worker.report_import_time(project)
end
diff --git a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
index bc51a44e057..875fc082975 100644
--- a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
let(:project) { double(:project, id: 4) }
+
let(:worker) { described_class.new }
describe '#import' do
@@ -36,15 +37,19 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
context 'when the import fails' do
it 'does not schedule the importing of the base data' do
client = double(:client)
+ exception_class = Gitlab::Git::Repository::NoRepository
expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
- expect(instance).to receive(:execute).and_return(false)
+ expect(instance).to receive(:execute).and_raise(exception_class)
end
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.not_to receive(:perform_async)
- worker.import(client, project)
+ expect(worker.abort_on_failure).to eq(true)
+
+ expect { worker.import(client, project) }
+ .to raise_error(exception_class)
end
end
end
diff --git a/spec/workers/gitlab/import/stuck_import_job_spec.rb b/spec/workers/gitlab/import/stuck_import_job_spec.rb
new file mode 100644
index 00000000000..3a1463e98a0
--- /dev/null
+++ b/spec/workers/gitlab/import/stuck_import_job_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Import::StuckImportJob do
+ let_it_be(:project) { create(:project, :import_started, import_source: 'foo/bar') }
+
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'MyStuckProjectImportsWorker'
+ end
+
+ include(Gitlab::Import::StuckImportJob)
+
+ def track_metrics(...)
+ nil
+ end
+
+ def enqueued_import_states
+ ProjectImportState.with_status([:scheduled, :started])
+ end
+ end.new
+ end
+
+ it 'marks the stuck import project as failed and track the error on import_failures' do
+ worker.perform
+
+ expect(project.import_state.reload.status).to eq('failed')
+ expect(project.import_state.last_error).to eq('Import timed out. Import took longer than 86400 seconds')
+
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('Gitlab::Import::StuckImportJob::StuckImportJobError')
+ expect(project.import_failures.last.exception_message).to eq('Import timed out. Import took longer than 86400 seconds')
+ end
+end
diff --git a/spec/workers/jira_connect/forward_event_worker_spec.rb b/spec/workers/jira_connect/forward_event_worker_spec.rb
index adfc071779a..7de9952a1da 100644
--- a/spec/workers/jira_connect/forward_event_worker_spec.rb
+++ b/spec/workers/jira_connect/forward_event_worker_spec.rb
@@ -15,23 +15,23 @@ RSpec.describe JiraConnect::ForwardEventWorker do
let(:client_key) { '123' }
let(:shared_secret) { '123' }
- subject { described_class.new.perform(jira_connect_installation.id, base_path, event_path) }
+ subject(:perform) { described_class.new.perform(jira_connect_installation.id, base_path, event_path) }
- it 'forwards the event including the auth header and deletes the installation' do
+ it 'forwards the event and deletes the installation' do
stub_request(:post, event_url)
expect(Atlassian::Jwt).to receive(:create_query_string_hash).with(event_url, 'POST', base_url).and_return('some_qsh')
expect(Atlassian::Jwt).to receive(:encode).with({ iss: client_key, qsh: 'some_qsh' }, shared_secret).and_return('auth_token')
- expect { subject }.to change(JiraConnectInstallation, :count).by(-1)
+ expect(JiraConnect::RetryRequestWorker).to receive(:perform_async).with(event_url, 'auth_token')
- expect(WebMock).to have_requested(:post, event_url).with(headers: { 'Authorization' => 'JWT auth_token' })
+ expect { perform }.to change(JiraConnectInstallation, :count).by(-1)
end
context 'when installation does not exist' do
let(:jira_connect_installation) { instance_double(JiraConnectInstallation, id: -1) }
it 'does nothing' do
- expect { subject }.not_to change(JiraConnectInstallation, :count)
+ expect { perform }.not_to change(JiraConnectInstallation, :count)
end
end
@@ -39,17 +39,9 @@ RSpec.describe JiraConnect::ForwardEventWorker do
let!(:jira_connect_installation) { create(:jira_connect_installation) }
it 'forwards the event including the auth header' do
- expect { subject }.to change(JiraConnectInstallation, :count).by(-1)
+ expect { perform }.to change(JiraConnectInstallation, :count).by(-1)
- expect(WebMock).not_to have_requested(:post, '*')
- end
- end
-
- context 'when it fails to forward the event' do
- it 'still deletes the installation' do
- allow(Gitlab::HTTP).to receive(:post).and_raise(StandardError)
-
- expect { subject }.to raise_error(StandardError).and change(JiraConnectInstallation, :count).by(-1)
+ expect(JiraConnect::RetryRequestWorker).not_to receive(:perform_async)
end
end
end
diff --git a/spec/workers/jira_connect/retry_request_worker_spec.rb b/spec/workers/jira_connect/retry_request_worker_spec.rb
new file mode 100644
index 00000000000..7a93e5fe41d
--- /dev/null
+++ b/spec/workers/jira_connect/retry_request_worker_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::RetryRequestWorker do
+ describe '#perform' do
+ let(:jwt) { 'some-jwt' }
+ let(:event_url) { 'https://example.com/somewhere' }
+ let(:attempts) { 3 }
+
+ subject(:perform) { described_class.new.perform(event_url, jwt, attempts) }
+
+ it 'sends the request, with the appropriate headers' do
+ expect(JiraConnect::RetryRequestWorker).not_to receive(:perform_in)
+
+ stub_request(:post, event_url)
+
+ perform
+
+ expect(WebMock).to have_requested(:post, event_url).with(headers: { 'Authorization' => 'JWT some-jwt' })
+ end
+
+ context 'when the proxied request fails' do
+ before do
+ stub_request(:post, event_url).to_return(status: 500, body: '', headers: {})
+ end
+
+ it 'arranges to retry the request' do
+ expect(JiraConnect::RetryRequestWorker).to receive(:perform_in).with(1.hour, event_url, jwt, attempts - 1)
+
+ perform
+ end
+
+ context 'when there are no more attempts left' do
+ let(:attempts) { 0 }
+
+ it 'does not retry' do
+ expect(JiraConnect::RetryRequestWorker).not_to receive(:perform_in)
+
+ perform
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/merge_request_mergeability_check_worker_spec.rb b/spec/workers/merge_request_mergeability_check_worker_spec.rb
index 0349de5cbb3..32debcf9651 100644
--- a/spec/workers/merge_request_mergeability_check_worker_spec.rb
+++ b/spec/workers/merge_request_mergeability_check_worker_spec.rb
@@ -10,6 +10,12 @@ RSpec.describe MergeRequestMergeabilityCheckWorker do
it 'does not execute MergeabilityCheckService' do
expect(MergeRequests::MergeabilityCheckService).not_to receive(:new)
+ expect(Sidekiq.logger).to receive(:error).once
+ .with(
+ merge_request_id: 1,
+ worker: "MergeRequestMergeabilityCheckWorker",
+ message: 'Failed to find merge request')
+
subject.perform(1)
end
end
@@ -24,6 +30,20 @@ RSpec.describe MergeRequestMergeabilityCheckWorker do
subject.perform(merge_request.id)
end
+
+ it 'structurally logs a failed mergeability check' do
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request) do |service|
+ expect(service).to receive(:execute).and_return(double(error?: true, message: "solar flares"))
+ end
+
+ expect(Sidekiq.logger).to receive(:error).once
+ .with(
+ merge_request_id: merge_request.id,
+ worker: "MergeRequestMergeabilityCheckWorker",
+ message: 'Failed to check mergeability of merge request: solar flares')
+
+ subject.perform(merge_request.id)
+ end
end
it_behaves_like 'an idempotent worker' do
diff --git a/spec/workers/packages/debian/generate_distribution_worker_spec.rb b/spec/workers/packages/debian/generate_distribution_worker_spec.rb
index a8751ccceae..a4627ec5d36 100644
--- a/spec/workers/packages/debian/generate_distribution_worker_spec.rb
+++ b/spec/workers/packages/debian/generate_distribution_worker_spec.rb
@@ -9,6 +9,9 @@ RSpec.describe Packages::Debian::GenerateDistributionWorker, type: :worker do
subject { described_class.new.perform(container_type, distribution_id) }
+ let(:subject2) { described_class.new.perform(container_type, distribution_id) }
+ let(:subject3) { described_class.new.perform(container_type, distribution_id) }
+
include_context 'with published Debian package'
[:project, :group].each do |container_type|
diff --git a/spec/workers/pipeline_notification_worker_spec.rb b/spec/workers/pipeline_notification_worker_spec.rb
index 9a15864173c..583c4bf1c0c 100644
--- a/spec/workers/pipeline_notification_worker_spec.rb
+++ b/spec/workers/pipeline_notification_worker_spec.rb
@@ -20,5 +20,9 @@ RSpec.describe PipelineNotificationWorker, :mailer do
subject.perform(non_existing_record_id)
end
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ data_consistency: :delayed
end
end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 04a38874905..c111c3164eb 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -5,7 +5,13 @@ require 'spec_helper'
RSpec.describe PostReceive do
include AfterNextHelpers
- let(:changes) { "123456 789012 refs/heads/tést\n654321 210987 refs/tags/tag" }
+ let(:changes) do
+ <<~EOF
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/heads/tést
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/tags/tag
+ EOF
+ end
+
let(:wrongly_encoded_changes) { changes.encode("ISO-8859-1").force_encoding("UTF-8") }
let(:base64_changes) { Base64.encode64(wrongly_encoded_changes) }
let(:gl_repository) { "project-#{project.id}" }
@@ -64,7 +70,6 @@ RSpec.describe PostReceive do
describe '#process_project_changes' do
context 'with an empty project' do
let(:empty_project) { create(:project, :empty_repo) }
- let(:changes) { "123456 789012 refs/heads/tést1\n" }
before do
allow_next(Gitlab::GitPostReceive).to receive(:identify).and_return(empty_project.owner)
@@ -85,14 +90,6 @@ RSpec.describe PostReceive do
perform
end
- it 'tracks an event for the new_project_readme experiment', :experiment do
- expect_next_instance_of(NewProjectReadmeExperiment, :new_project_readme, nil, actor: empty_project.owner) do |e|
- expect(e).to receive(:track_initial_writes).with(empty_project)
- end
-
- perform
- end
-
it 'tracks an event for the empty_repo_upload experiment', :experiment do
expect_next_instance_of(EmptyRepoUploadExperiment) do |e|
expect(e).to receive(:track_initial_write)
@@ -154,8 +151,8 @@ RSpec.describe PostReceive do
context 'branches' do
let(:changes) do
<<~EOF
- 123456 789012 refs/heads/tést1
- 123456 789012 refs/heads/tést2
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/heads/tést1
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/heads/tést2
EOF
end
@@ -190,9 +187,9 @@ RSpec.describe PostReceive do
context 'with a default branch' do
let(:changes) do
<<~EOF
- 123456 789012 refs/heads/tést1
- 123456 789012 refs/heads/tést2
- 678912 123455 refs/heads/#{project.default_branch}
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/heads/tést1
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/heads/tést2
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/heads/#{project.default_branch}
EOF
end
@@ -208,9 +205,9 @@ RSpec.describe PostReceive do
context 'tags' do
let(:changes) do
<<~EOF
- 654321 210987 refs/tags/tag1
- 654322 210986 refs/tags/tag2
- 654323 210985 refs/tags/tag3
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/tags/tag1
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/tags/tag2
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/tags/tag3
EOF
end
@@ -249,7 +246,7 @@ RSpec.describe PostReceive do
end
context 'merge-requests' do
- let(:changes) { "123456 789012 refs/merge-requests/123" }
+ let(:changes) { "#{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/merge-requests/123" }
it "does not call any of the services" do
expect(Git::ProcessRefChangesService).not_to receive(:new)
@@ -261,7 +258,6 @@ RSpec.describe PostReceive do
end
context 'after project changes hooks' do
- let(:changes) { '123456 789012 refs/heads/tést' }
let(:fake_hook_data) { { event_name: 'repository_update' } }
before do
@@ -313,12 +309,12 @@ RSpec.describe PostReceive do
context 'master' do
let(:default_branch) { 'master' }
- let(:oldrev) { '012345' }
- let(:newrev) { '6789ab' }
+ let(:oldrev) { SeedRepo::Commit::PARENT_ID }
+ let(:newrev) { SeedRepo::Commit::ID }
let(:changes) do
<<~EOF
#{oldrev} #{newrev} refs/heads/#{default_branch}
- 123456 789012 refs/heads/tést2
+ #{oldrev} #{newrev} refs/heads/tést2
EOF
end
@@ -334,8 +330,8 @@ RSpec.describe PostReceive do
context 'branches' do
let(:changes) do
<<~EOF
- 123456 789012 refs/heads/tést1
- 123456 789012 refs/heads/tést2
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/heads/tést1
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/heads/tést2
EOF
end
@@ -414,8 +410,8 @@ RSpec.describe PostReceive do
context 'branches' do
let(:changes) do
<<~EOF
- 123456 789012 refs/heads/tést1
- 123456 789012 refs/heads/tést2
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/heads/tést1
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/heads/tést2
EOF
end
@@ -442,9 +438,9 @@ RSpec.describe PostReceive do
context 'tags' do
let(:changes) do
<<~EOF
- 654321 210987 refs/tags/tag1
- 654322 210986 refs/tags/tag2
- 654323 210985 refs/tags/tag3
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/tags/tag1
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/tags/tag2
+ #{SeedRepo::Commit::PARENT_ID} #{SeedRepo::Commit::ID} refs/tags/tag3
EOF
end
diff --git a/spec/workers/propagate_integration_worker_spec.rb b/spec/workers/propagate_integration_worker_spec.rb
index 2461b30a2ed..902e3206d35 100644
--- a/spec/workers/propagate_integration_worker_spec.rb
+++ b/spec/workers/propagate_integration_worker_spec.rb
@@ -4,9 +4,10 @@ require 'spec_helper'
RSpec.describe PropagateIntegrationWorker do
describe '#perform' do
+ let(:project) { create(:project) }
let(:integration) do
Integrations::Pushover.create!(
- template: true,
+ project: project,
active: true,
device: 'MyDevice',
sound: 'mic',
diff --git a/spec/workers/propagate_service_template_worker_spec.rb b/spec/workers/propagate_service_template_worker_spec.rb
deleted file mode 100644
index b692ce3d72b..00000000000
--- a/spec/workers/propagate_service_template_worker_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe PropagateServiceTemplateWorker do
- include ExclusiveLeaseHelpers
-
- describe '#perform' do
- it 'calls the propagate service with the template' do
- template = Integrations::Pushover.create!(
- template: true,
- active: true,
- properties: {
- device: 'MyDevice',
- sound: 'mic',
- priority: 4,
- user_key: 'asdf',
- api_key: '123456789'
- })
-
- stub_exclusive_lease("propagate_service_template_worker:#{template.id}",
- timeout: PropagateServiceTemplateWorker::LEASE_TIMEOUT)
-
- expect(Admin::PropagateServiceTemplate)
- .to receive(:propagate)
- .with(template)
-
- subject.perform(template.id)
- end
- end
-end
diff --git a/spec/workers/repository_remove_remote_worker_spec.rb b/spec/workers/repository_remove_remote_worker_spec.rb
index 758f7f75e03..11081ec9b37 100644
--- a/spec/workers/repository_remove_remote_worker_spec.rb
+++ b/spec/workers/repository_remove_remote_worker_spec.rb
@@ -24,37 +24,25 @@ RSpec.describe RepositoryRemoveRemoteWorker do
.and_return(project)
end
- it 'does not remove remote when cannot obtain lease' do
+ it 'does nothing when cannot obtain lease' do
stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
expect(project.repository)
.not_to receive(:remove_remote)
-
expect(subject)
- .to receive(:log_error)
- .with("Cannot obtain an exclusive lease for #{lease_key}. There must be another instance already in execution.")
+ .not_to receive(:log_error)
subject.perform(project.id, remote_name)
end
- it 'removes remote from repository when obtain a lease' do
+ it 'does nothing when obtain a lease' do
stub_exclusive_lease(lease_key, timeout: lease_timeout)
- masterrev = project.repository.find_branch('master').dereferenced_target
- create_remote_branch(remote_name, 'remote_branch', masterrev)
expect(project.repository)
- .to receive(:remove_remote)
- .with(remote_name)
- .and_call_original
+ .not_to receive(:remove_remote)
subject.perform(project.id, remote_name)
end
end
end
-
- def create_remote_branch(remote_name, branch_name, target)
- rugged = rugged_repo(project.repository)
-
- rugged.references.create("refs/remotes/#{remote_name}/#{branch_name}", target.id)
- end
end
diff --git a/spec/workers/users/create_statistics_worker_spec.rb b/spec/workers/users/create_statistics_worker_spec.rb
index e3f082313a0..2118cc42f3a 100644
--- a/spec/workers/users/create_statistics_worker_spec.rb
+++ b/spec/workers/users/create_statistics_worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Users::CreateStatisticsWorker do
subject { described_class.new.perform }
before do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(UsersStatistics.connection).to receive(:transaction_open?).and_return(false)
end
context 'when successful' do
diff --git a/spec/workers/web_hook_worker_spec.rb b/spec/workers/web_hook_worker_spec.rb
index a86964aa417..0f40177eb7d 100644
--- a/spec/workers/web_hook_worker_spec.rb
+++ b/spec/workers/web_hook_worker_spec.rb
@@ -15,6 +15,10 @@ RSpec.describe WebHookWorker do
subject.perform(project_hook.id, data, hook_name)
end
+ it 'does not error when the WebHook record cannot be found' do
+ expect { subject.perform(non_existing_record_id, data, hook_name) }.not_to raise_error
+ end
+
it_behaves_like 'worker with data consistency',
described_class,
data_consistency: :delayed