summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2019-11-19 22:11:55 +0000
committerGitLab Bot <gitlab-bot@gitlab.com>2019-11-19 22:11:55 +0000
commit5a8431feceba47fd8e1804d9aa1b1730606b71d5 (patch)
treee5df8e0ceee60f4af8093f5c4c2f934b8abced05 /spec
parent4d477238500c347c6553d335d920bedfc5a46869 (diff)
downloadgitlab-ce-5a8431feceba47fd8e1804d9aa1b1730606b71d5.tar.gz
Add latest changes from gitlab-org/gitlab@12-5-stable-ee
Diffstat (limited to 'spec')
-rw-r--r--spec/controllers/abuse_reports_controller_spec.rb4
-rw-r--r--spec/controllers/admin/clusters_controller_spec.rb156
-rw-r--r--spec/controllers/admin/identities_controller_spec.rb8
-rw-r--r--spec/controllers/admin/spam_logs_controller_spec.rb6
-rw-r--r--spec/controllers/admin/users_controller_spec.rb2
-rw-r--r--spec/controllers/application_controller_spec.rb34
-rw-r--r--spec/controllers/concerns/confirm_email_warning_spec.rb2
-rw-r--r--spec/controllers/concerns/metrics_dashboard_spec.rb47
-rw-r--r--spec/controllers/concerns/redirects_for_missing_path_on_tree_spec.rb33
-rw-r--r--spec/controllers/concerns/renders_commits_spec.rb60
-rw-r--r--spec/controllers/concerns/sourcegraph_gon_spec.rb118
-rw-r--r--spec/controllers/google_api/authorizations_controller_spec.rb5
-rw-r--r--spec/controllers/groups/clusters_controller_spec.rb146
-rw-r--r--spec/controllers/groups/group_links_controller_spec.rb114
-rw-r--r--spec/controllers/groups/milestones_controller_spec.rb18
-rw-r--r--spec/controllers/groups_controller_spec.rb8
-rw-r--r--spec/controllers/health_controller_spec.rb134
-rw-r--r--spec/controllers/import/gitlab_controller_spec.rb5
-rw-r--r--spec/controllers/import/phabricator_controller_spec.rb2
-rw-r--r--spec/controllers/ldap/omniauth_callbacks_controller_spec.rb8
-rw-r--r--spec/controllers/metrics_controller_spec.rb4
-rw-r--r--spec/controllers/projects/blame_controller_spec.rb19
-rw-r--r--spec/controllers/projects/blob_controller_spec.rb15
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb146
-rw-r--r--spec/controllers/projects/discussions_controller_spec.rb16
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb4
-rw-r--r--spec/controllers/projects/error_tracking_controller_spec.rb202
-rw-r--r--spec/controllers/projects/grafana_api_controller_spec.rb71
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb2
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb18
-rw-r--r--spec/controllers/projects/labels_controller_spec.rb18
-rw-r--r--spec/controllers/projects/mattermosts_controller_spec.rb13
-rw-r--r--spec/controllers/projects/merge_requests/creations_controller_spec.rb4
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb22
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb246
-rw-r--r--spec/controllers/projects/mirrors_controller_spec.rb4
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb10
-rw-r--r--spec/controllers/projects/pages_domains_controller_spec.rb62
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb6
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb8
-rw-r--r--spec/controllers/projects/prometheus/metrics_controller_spec.rb4
-rw-r--r--spec/controllers/projects/releases_controller_spec.rb159
-rw-r--r--spec/controllers/projects/serverless/functions_controller_spec.rb94
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb8
-rw-r--r--spec/controllers/projects/settings/operations_controller_spec.rb3
-rw-r--r--spec/controllers/projects/snippets_controller_spec.rb12
-rw-r--r--spec/controllers/projects/tree_controller_spec.rb20
-rw-r--r--spec/controllers/projects/usage_ping_controller_spec.rb64
-rw-r--r--spec/controllers/projects_controller_spec.rb28
-rw-r--r--spec/controllers/registrations_controller_spec.rb94
-rw-r--r--spec/controllers/sessions_controller_spec.rb58
-rw-r--r--spec/controllers/snippets_controller_spec.rb12
-rw-r--r--spec/controllers/users_controller_spec.rb46
-rw-r--r--spec/db/schema_spec.rb48
-rw-r--r--spec/dependencies/omniauth_saml_spec.rb4
-rw-r--r--spec/factories/ci/pipelines.rb44
-rw-r--r--spec/factories/clusters/applications/helm.rb9
-rw-r--r--spec/factories/clusters/clusters.rb20
-rw-r--r--spec/factories/clusters/platforms/kubernetes.rb2
-rw-r--r--spec/factories/clusters/providers/aws.rb3
-rw-r--r--spec/factories/clusters/providers/gcp.rb2
-rw-r--r--spec/factories/commit_statuses.rb2
-rw-r--r--spec/factories/deployments.rb4
-rw-r--r--spec/factories/error_tracking/detailed_error.rb29
-rw-r--r--spec/factories/error_tracking/error_event.rb18
-rw-r--r--spec/factories/grafana_integrations.rb3
-rw-r--r--spec/factories/group_group_links.rb9
-rw-r--r--spec/factories/issues.rb1
-rw-r--r--spec/factories/merge_requests.rb13
-rw-r--r--spec/factories/projects.rb5
-rw-r--r--spec/factories/zoom_meetings.rb18
-rw-r--r--spec/features/admin/admin_abuse_reports_spec.rb24
-rw-r--r--spec/features/admin/admin_projects_spec.rb5
-rw-r--r--spec/features/admin/admin_settings_spec.rb31
-rw-r--r--spec/features/admin/admin_users_spec.rb4
-rw-r--r--spec/features/admin/clusters/eks_spec.rb29
-rw-r--r--spec/features/calendar_spec.rb10
-rw-r--r--spec/features/clusters/installing_applications_shared_examples.rb31
-rw-r--r--spec/features/commits_spec.rb37
-rw-r--r--spec/features/container_registry_spec.rb2
-rw-r--r--spec/features/cycle_analytics_spec.rb10
-rw-r--r--spec/features/dashboard/projects_spec.rb3
-rw-r--r--spec/features/explore/groups_spec.rb4
-rw-r--r--spec/features/global_search_spec.rb4
-rw-r--r--spec/features/groups/clusters/eks_spec.rb35
-rw-r--r--spec/features/groups/clusters/user_spec.rb8
-rw-r--r--spec/features/groups/group_page_with_external_authorization_service_spec.rb4
-rw-r--r--spec/features/groups/issues_spec.rb4
-rw-r--r--spec/features/groups/milestone_spec.rb141
-rw-r--r--spec/features/groups_spec.rb20
-rw-r--r--spec/features/import/manifest_import_spec.rb2
-rw-r--r--spec/features/issuables/markdown_references/internal_references_spec.rb4
-rw-r--r--spec/features/issuables/markdown_references/jira_spec.rb6
-rw-r--r--spec/features/issuables/sorting_list_spec.rb24
-rw-r--r--spec/features/issues/filtered_search/dropdown_hint_spec.rb11
-rw-r--r--spec/features/issues/filtered_search/dropdown_release_spec.rb55
-rw-r--r--spec/features/issues/notes_on_issues_spec.rb2
-rw-r--r--spec/features/issues/user_creates_branch_and_merge_request_spec.rb4
-rw-r--r--spec/features/issues/user_creates_confidential_merge_request_spec.rb2
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb13
-rw-r--r--spec/features/issues/user_toggles_subscription_spec.rb1
-rw-r--r--spec/features/markdown/metrics_spec.rb66
-rw-r--r--spec/features/merge_request/maintainer_edits_fork_spec.rb6
-rw-r--r--spec/features/merge_request/user_accepts_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb6
-rw-r--r--spec/features/merge_request/user_comments_on_diff_spec.rb3
-rw-r--r--spec/features/merge_request/user_creates_image_diff_notes_spec.rb3
-rw-r--r--spec/features/merge_request/user_creates_merge_request_spec.rb7
-rw-r--r--spec/features/merge_request/user_edits_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_expands_diff_spec.rb4
-rw-r--r--spec/features/merge_request/user_merges_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb4
-rw-r--r--spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb2
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb3
-rw-r--r--spec/features/merge_request/user_resolves_conflicts_spec.rb5
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb6
-rw-r--r--spec/features/merge_request/user_reverts_merge_request_spec.rb6
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb7
-rw-r--r--spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_deployment_widget_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_diff_spec.rb10
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb10
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb63
-rw-r--r--spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb4
-rw-r--r--spec/features/merge_request/user_suggests_changes_on_diff_spec.rb3
-rw-r--r--spec/features/merge_request/user_toggles_whitespace_changes_spec.rb3
-rw-r--r--spec/features/merge_request/user_views_diffs_spec.rb3
-rw-r--r--spec/features/merge_requests/user_squashes_merge_request_spec.rb8
-rw-r--r--spec/features/milestones/user_views_milestones_spec.rb27
-rw-r--r--spec/features/populate_new_pipeline_vars_with_params_spec.rb32
-rw-r--r--spec/features/profile_spec.rb2
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb4
-rw-r--r--spec/features/project_group_variables_spec.rb60
-rw-r--r--spec/features/projects/badges/pipeline_badge_spec.rb8
-rw-r--r--spec/features/projects/blobs/edit_spec.rb21
-rw-r--r--spec/features/projects/clusters/eks_spec.rb3
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb6
-rw-r--r--spec/features/projects/clusters/user_spec.rb8
-rw-r--r--spec/features/projects/commit/cherry_pick_spec.rb6
-rw-r--r--spec/features/projects/commits/user_browses_commits_spec.rb10
-rw-r--r--spec/features/projects/compare_spec.rb8
-rw-r--r--spec/features/projects/environments/environment_spec.rb5
-rw-r--r--spec/features/projects/environments/environments_spec.rb4
-rw-r--r--spec/features/projects/features_visibility_spec.rb8
-rw-r--r--spec/features/projects/files/files_sort_submodules_with_folders_spec.rb4
-rw-r--r--spec/features/projects/files/project_owner_creates_license_file_spec.rb3
-rw-r--r--spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb2
-rw-r--r--spec/features/projects/files/user_browses_files_spec.rb24
-rw-r--r--spec/features/projects/files/user_browses_lfs_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_creates_directory_spec.rb4
-rw-r--r--spec/features/projects/files/user_creates_files_spec.rb20
-rw-r--r--spec/features/projects/files/user_deletes_files_spec.rb4
-rw-r--r--spec/features/projects/files/user_edits_files_spec.rb9
-rw-r--r--spec/features/projects/files/user_reads_pipeline_status_spec.rb4
-rw-r--r--spec/features/projects/files/user_replaces_files_spec.rb4
-rw-r--r--spec/features/projects/files/user_uploads_files_spec.rb4
-rw-r--r--spec/features/projects/fork_spec.rb4
-rw-r--r--spec/features/projects/forks/fork_list_spec.rb2
-rw-r--r--spec/features/projects/graph_spec.rb6
-rw-r--r--spec/features/projects/import_export/export_file_spec.rb6
-rw-r--r--spec/features/projects/import_export/import_file_spec.rb8
-rw-r--r--spec/features/projects/jobs_spec.rb4
-rw-r--r--spec/features/projects/labels/search_labels_spec.rb2
-rw-r--r--spec/features/projects/members/member_leaves_project_spec.rb2
-rw-r--r--spec/features/projects/members/user_requests_access_spec.rb2
-rw-r--r--spec/features/projects/milestones/milestone_spec.rb106
-rw-r--r--spec/features/projects/pages_lets_encrypt_spec.rb49
-rw-r--r--spec/features/projects/pages_spec.rb115
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb30
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb26
-rw-r--r--spec/features/projects/settings/operations_settings_spec.rb25
-rw-r--r--spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb23
-rw-r--r--spec/features/projects/show/user_sees_collaboration_links_spec.rb23
-rw-r--r--spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb8
-rw-r--r--spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb6
-rw-r--r--spec/features/projects/tree/tree_show_spec.rb1
-rw-r--r--spec/features/projects/view_on_env_spec.rb10
-rw-r--r--spec/features/projects_spec.rb20
-rw-r--r--spec/features/raven_js_spec.rb27
-rw-r--r--spec/features/search/user_uses_header_search_field_spec.rb26
-rw-r--r--spec/features/security/project/internal_access_spec.rb8
-rw-r--r--spec/features/security/project/private_access_spec.rb8
-rw-r--r--spec/features/security/project/public_access_spec.rb8
-rw-r--r--spec/features/sentry_js_spec.rb28
-rw-r--r--spec/features/signed_commits_spec.rb26
-rw-r--r--spec/features/tags/developer_deletes_tag_spec.rb6
-rw-r--r--spec/features/unsubscribe_links_spec.rb2
-rw-r--r--spec/features/user_sees_revert_modal_spec.rb2
-rw-r--r--spec/features/users/anonymous_sessions_spec.rb41
-rw-r--r--spec/features/users/login_spec.rb2
-rw-r--r--spec/features/users/signup_spec.rb10
-rw-r--r--spec/finders/abuse_reports_finder_spec.rb27
-rw-r--r--spec/finders/branches_finder_spec.rb56
-rw-r--r--spec/finders/container_repositories_finder_spec.rb50
-rw-r--r--spec/finders/issues_finder_spec.rb14
-rw-r--r--spec/finders/merge_requests_finder_spec.rb12
-rw-r--r--spec/finders/projects_finder_spec.rb45
-rw-r--r--spec/finders/prometheus_metrics_finder_spec.rb144
-rw-r--r--spec/finders/releases_finder_spec.rb33
-rw-r--r--spec/finders/tags_finder_spec.rb38
-rw-r--r--spec/finders/todos_finder_spec.rb88
-rw-r--r--spec/fixtures/api/schemas/cluster_status.json2
-rw-r--r--spec/fixtures/api/schemas/entities/merge_request_sidebar_extras.json2
-rw-r--r--spec/fixtures/api/schemas/error_tracking/error.json20
-rw-r--r--spec/fixtures/api/schemas/error_tracking/error_detailed.json45
-rw-r--r--spec/fixtures/api/schemas/error_tracking/error_stack_trace.json14
-rw-r--r--spec/fixtures/api/schemas/error_tracking/issue_detailed.json11
-rw-r--r--spec/fixtures/api/schemas/error_tracking/issue_stack_trace.json11
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/blobs.json3
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json3
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json3
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/release.json5
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json5
-rw-r--r--spec/fixtures/api/schemas/release.json3
-rw-r--r--spec/fixtures/grafana/dashboard_response.json764
-rw-r--r--spec/fixtures/grafana/datasource_response.json21
-rw-r--r--spec/fixtures/grafana/expected_grafana_embed.json27
-rw-r--r--spec/fixtures/grafana/proxy_response.json459
-rw-r--r--spec/fixtures/grafana/simplified_dashboard_response.json40
-rw-r--r--spec/fixtures/group_export.tar.gzbin0 -> 4551 bytes
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json (renamed from spec/fixtures/lib/gitlab/import_export/project.json)66
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group/project.json (renamed from spec/fixtures/lib/gitlab/import_export/project.group.json)0
-rw-r--r--spec/fixtures/lib/gitlab/import_export/light/project.json (renamed from spec/fixtures/lib/gitlab/import_export/project.light.json)0
-rw-r--r--spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json (renamed from spec/fixtures/lib/gitlab/import_export/project.milestone-iid.json)0
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json1
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json1
-rw-r--r--spec/frontend/api_spec.js20
-rw-r--r--spec/frontend/boards/components/issue_time_estimate_spec.js81
-rw-r--r--spec/frontend/boards/issue_card_spec.js307
-rw-r--r--spec/frontend/boards/stores/getters_spec.js21
-rw-r--r--spec/frontend/clusters/clusters_bundle_spec.js26
-rw-r--r--spec/frontend/clusters/components/applications_spec.js157
-rw-r--r--spec/frontend/clusters/services/crossplane_provider_stack_spec.js78
-rw-r--r--spec/frontend/clusters/services/mock_data.js27
-rw-r--r--spec/frontend/clusters/stores/clusters_store_spec.js43
-rw-r--r--spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap4
-rw-r--r--spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap47
-rw-r--r--spec/frontend/contributors/component/contributors_spec.js69
-rw-r--r--spec/frontend/contributors/store/actions_spec.js60
-rw-r--r--spec/frontend/contributors/store/getters_spec.js73
-rw-r--r--spec/frontend/contributors/store/mutations_spec.js40
-rw-r--r--spec/frontend/contributors/utils_spec.js21
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/cluster_form_dropdown_spec.js44
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js91
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js181
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/region_dropdown_spec.js55
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/service_credentials_form_spec.js117
-rw-r--r--spec/frontend/create_cluster/eks_cluster/services/aws_services_facade_spec.js152
-rw-r--r--spec/frontend/create_cluster/eks_cluster/store/actions_spec.js248
-rw-r--r--spec/frontend/create_cluster/eks_cluster/store/mutations_spec.js113
-rw-r--r--spec/frontend/create_cluster/gke_cluster_namespace/gke_cluster_namespace_spec.js (renamed from spec/frontend/projects/gke_cluster_namespace/gke_cluster_namespace_spec.js)4
-rw-r--r--spec/frontend/create_cluster/init_create_cluster_spec.js73
-rw-r--r--spec/frontend/cycle_analytics/stage_nav_item_spec.js44
-rw-r--r--spec/frontend/environment.js6
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js105
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js15
-rw-r--r--spec/frontend/error_tracking/components/stacktrace_entry_spec.js49
-rw-r--r--spec/frontend/error_tracking/components/stacktrace_spec.js45
-rw-r--r--spec/frontend/error_tracking/store/details/actions_spec.js94
-rw-r--r--spec/frontend/error_tracking/store/details/getters_spec.js13
-rw-r--r--spec/frontend/error_tracking/store/list/getters_spec.js33
-rw-r--r--spec/frontend/error_tracking/store/list/mutation_spec.js (renamed from spec/frontend/error_tracking/store/mutation_spec.js)4
-rw-r--r--spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js25
-rw-r--r--spec/frontend/error_tracking_settings/store/actions_spec.js16
-rw-r--r--spec/frontend/fixtures/merge_requests.rb18
-rw-r--r--spec/frontend/fixtures/static/environments_logs.html4
-rw-r--r--spec/frontend/fixtures/static/signin_tabs.html3
-rw-r--r--spec/frontend/fixtures/u2f.rb4
-rw-r--r--spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap101
-rw-r--r--spec/frontend/grafana_integration/components/grafana_integration_spec.js125
-rw-r--r--spec/frontend/grafana_integration/store/mutations_spec.js35
-rw-r--r--spec/frontend/helpers/monitor_helper_spec.js82
-rw-r--r--spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap61
-rw-r--r--spec/frontend/ide/components/jobs/stage_spec.js86
-rw-r--r--spec/frontend/ide/components/preview/clientside_spec.js20
-rw-r--r--spec/frontend/ide/services/index_spec.js83
-rw-r--r--spec/frontend/ide/stores/modules/clientside/actions_spec.js39
-rw-r--r--spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap15
-rw-r--r--spec/frontend/issuables_list/components/issuable_spec.js345
-rw-r--r--spec/frontend/issuables_list/components/issuables_list_app_spec.js410
-rw-r--r--spec/frontend/issuables_list/issuable_list_test_data.js72
-rw-r--r--spec/frontend/issue_show/helpers.js10
-rw-r--r--spec/frontend/jobs/components/log/log_spec.js4
-rw-r--r--spec/frontend/jobs/store/utils_spec.js20
-rw-r--r--spec/frontend/lib/utils/chart_utils_spec.js11
-rw-r--r--spec/frontend/lib/utils/datetime_utility_spec.js47
-rw-r--r--spec/frontend/lib/utils/number_utility_spec.js40
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js13
-rw-r--r--spec/frontend/monitoring/charts/time_series_spec.js (renamed from spec/javascripts/monitoring/charts/time_series_spec.js)154
-rw-r--r--spec/frontend/monitoring/components/charts/anomaly_spec.js303
-rw-r--r--spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js10
-rw-r--r--spec/frontend/monitoring/embed/embed_spec.js4
-rw-r--r--spec/frontend/monitoring/embed/mock_data.js4
-rw-r--r--spec/frontend/monitoring/mock_data.js465
-rw-r--r--spec/frontend/monitoring/panel_type_spec.js166
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js (renamed from spec/javascripts/monitoring/store/actions_spec.js)277
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js (renamed from spec/javascripts/monitoring/store/mutations_spec.js)127
-rw-r--r--spec/frontend/monitoring/store/utils_spec.js (renamed from spec/javascripts/monitoring/store/utils_spec.js)0
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js331
-rw-r--r--spec/frontend/notes/components/diff_discussion_header_spec.js141
-rw-r--r--spec/frontend/notes/components/discussion_actions_spec.js2
-rw-r--r--spec/frontend/notes/components/discussion_notes_spec.js6
-rw-r--r--spec/frontend/notes/components/note_app_spec.js26
-rw-r--r--spec/frontend/notes/mock_data.js1255
-rw-r--r--spec/frontend/performance_bar/components/add_request_spec.js62
-rw-r--r--spec/frontend/pipelines/graph/action_component_spec.js75
-rw-r--r--spec/frontend/pipelines/pipeline_triggerer_spec.js (renamed from spec/javascripts/pipelines/pipeline_triggerer_spec.js)5
-rw-r--r--spec/frontend/pipelines/pipelines_table_row_spec.js (renamed from spec/javascripts/pipelines/pipelines_table_row_spec.js)138
-rw-r--r--spec/frontend/pipelines/test_reports/mock_data.js123
-rw-r--r--spec/frontend/pipelines/test_reports/stores/actions_spec.js109
-rw-r--r--spec/frontend/pipelines/test_reports/stores/getters_spec.js54
-rw-r--r--spec/frontend/pipelines/test_reports/stores/mutations_spec.js63
-rw-r--r--spec/frontend/pipelines/test_reports/test_reports_spec.js64
-rw-r--r--spec/frontend/pipelines/test_reports/test_suite_table_spec.js77
-rw-r--r--spec/frontend/pipelines/test_reports/test_summary_spec.js78
-rw-r--r--spec/frontend/pipelines/test_reports/test_summary_table_spec.js54
-rw-r--r--spec/frontend/project_find_file_spec.js37
-rw-r--r--spec/frontend/registry/components/collapsible_container_spec.js53
-rw-r--r--spec/frontend/registry/components/table_registry_spec.js119
-rw-r--r--spec/frontend/releases/detail/components/app_spec.js19
-rw-r--r--spec/frontend/releases/list/components/__snapshots__/release_block_spec.js.snap332
-rw-r--r--spec/frontend/releases/list/components/release_block_footer_spec.js163
-rw-r--r--spec/frontend/releases/list/components/release_block_spec.js33
-rw-r--r--spec/frontend/releases/mock_data.js4
-rw-r--r--spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap75
-rw-r--r--spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap56
-rw-r--r--spec/frontend/repository/components/directory_download_links_spec.js29
-rw-r--r--spec/frontend/repository/components/last_commit_spec.js4
-rw-r--r--spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap36
-rw-r--r--spec/frontend/repository/components/preview/index_spec.js49
-rw-r--r--spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap2
-rw-r--r--spec/frontend/repository/components/table/index_spec.js74
-rw-r--r--spec/frontend/repository/components/table/row_spec.js25
-rw-r--r--spec/frontend/repository/components/tree_content_spec.js71
-rw-r--r--spec/frontend/repository/log_tree_spec.js27
-rw-r--r--spec/frontend/repository/pages/index_spec.js42
-rw-r--r--spec/frontend/repository/pages/tree_spec.js60
-rw-r--r--spec/frontend/repository/utils/commit_spec.js30
-rw-r--r--spec/frontend/repository/utils/dom_spec.js20
-rw-r--r--spec/frontend/repository/utils/readme_spec.js33
-rw-r--r--spec/frontend/repository/utils/title_spec.js4
-rw-r--r--spec/frontend/sentry/index_spec.js (renamed from spec/javascripts/raven/index_spec.js)20
-rw-r--r--spec/frontend/sentry/sentry_config_spec.js214
-rw-r--r--spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js1
-rw-r--r--spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js1
-rw-r--r--spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap37
-rw-r--r--spec/frontend/vue_shared/components/commit_spec.js (renamed from spec/javascripts/vue_shared/components/commit_spec.js)115
-rw-r--r--spec/frontend/vue_shared/components/content_viewer/viewers/image_viewer_spec.js45
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_assignees_spec.js189
-rw-r--r--spec/frontend/vue_shared/components/notes/placeholder_note_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/notes/system_note_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/slot_switch_spec.js56
-rw-r--r--spec/frontend/vue_shared/components/split_button_spec.js104
-rw-r--r--spec/frontend/vue_shared/components/table_pagination_spec.js (renamed from spec/javascripts/vue_shared/components/table_pagination_spec.js)175
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js108
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js186
-rw-r--r--spec/graphql/features/authorization_spec.rb3
-rw-r--r--spec/graphql/gitlab_schema_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_assignees_spec.rb106
-rw-r--r--spec/graphql/mutations/merge_requests/set_labels_spec.rb77
-rw-r--r--spec/graphql/mutations/merge_requests/set_locked_spec.rb49
-rw-r--r--spec/graphql/mutations/merge_requests/set_milestone_spec.rb53
-rw-r--r--spec/graphql/mutations/merge_requests/set_subscription_spec.rb42
-rw-r--r--spec/graphql/mutations/todos/mark_done_spec.rb66
-rw-r--r--spec/graphql/resolvers/base_resolver_spec.rb24
-rw-r--r--spec/graphql/resolvers/commit_pipelines_resolver_spec.rb53
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb42
-rw-r--r--spec/graphql/types/base_enum_spec.rb24
-rw-r--r--spec/graphql/types/commit_type_spec.rb2
-rw-r--r--spec/graphql/types/extended_issue_type_spec.rb21
-rw-r--r--spec/graphql/types/issue_sort_enum_spec.rb13
-rw-r--r--spec/graphql/types/issue_type_spec.rb2
-rw-r--r--spec/graphql/types/label_type_spec.rb2
-rw-r--r--spec/graphql/types/project_type_spec.rb3
-rw-r--r--spec/graphql/types/tree/blob_type_spec.rb2
-rw-r--r--spec/graphql/types/tree/submodule_type_spec.rb2
-rw-r--r--spec/graphql/types/tree/tree_entry_type_spec.rb2
-rw-r--r--spec/helpers/application_helper_spec.rb4
-rw-r--r--spec/helpers/application_settings_helper_spec.rb23
-rw-r--r--spec/helpers/auth_helper_spec.rb17
-rw-r--r--spec/helpers/clusters_helper_spec.rb56
-rw-r--r--spec/helpers/dashboard_helper_spec.rb61
-rw-r--r--spec/helpers/environments_helper_spec.rb1
-rw-r--r--spec/helpers/gitlab_routing_helper_spec.rb6
-rw-r--r--spec/helpers/issuables_helper_spec.rb55
-rw-r--r--spec/helpers/markup_helper_spec.rb37
-rw-r--r--spec/helpers/projects_helper_spec.rb18
-rw-r--r--spec/helpers/releases_helper_spec.rb14
-rw-r--r--spec/helpers/search_helper_spec.rb47
-rw-r--r--spec/helpers/snippets_helper_spec.rb206
-rw-r--r--spec/helpers/sourcegraph_helper_spec.rb64
-rw-r--r--spec/helpers/users_helper_spec.rb4
-rw-r--r--spec/initializers/6_validations_spec.rb2
-rw-r--r--spec/initializers/action_mailer_hooks_spec.rb2
-rw-r--r--spec/initializers/asset_proxy_setting_spec.rb2
-rw-r--r--spec/initializers/attr_encrypted_no_db_connection_spec.rb2
-rw-r--r--spec/initializers/database_config_spec.rb73
-rw-r--r--spec/initializers/direct_upload_support_spec.rb2
-rw-r--r--spec/initializers/doorkeeper_spec.rb2
-rw-r--r--spec/initializers/fog_google_https_private_urls_spec.rb2
-rw-r--r--spec/initializers/lograge_spec.rb48
-rw-r--r--spec/initializers/rest-client-hostname_override_spec.rb2
-rw-r--r--spec/initializers/secret_token_spec.rb2
-rw-r--r--spec/initializers/settings_spec.rb2
-rw-r--r--spec/initializers/trusted_proxies_spec.rb2
-rw-r--r--spec/initializers/zz_metrics_spec.rb2
-rw-r--r--spec/javascripts/boards/board_card_spec.js2
-rw-r--r--spec/javascripts/boards/board_list_common_spec.js15
-rw-r--r--spec/javascripts/boards/board_list_spec.js250
-rw-r--r--spec/javascripts/boards/components/boards_selector_spec.js7
-rw-r--r--spec/javascripts/boards/components/issue_time_estimate_spec.js70
-rw-r--r--spec/javascripts/boards/issue_card_spec.js292
-rw-r--r--spec/javascripts/bootstrap_jquery_spec.js14
-rw-r--r--spec/javascripts/ci_variable_list/ajax_variable_list_spec.js2
-rw-r--r--spec/javascripts/diffs/components/diff_file_spec.js49
-rw-r--r--spec/javascripts/diffs/mock_data/diff_file_unreadable.js244
-rw-r--r--spec/javascripts/dropzone_input_spec.js86
-rw-r--r--spec/javascripts/frequent_items/components/app_spec.js2
-rw-r--r--spec/javascripts/frequent_items/mock_data.js4
-rw-r--r--spec/javascripts/frequent_items/store/actions_spec.js7
-rw-r--r--spec/javascripts/graphs/stat_graph_contributors_graph_spec.js152
-rw-r--r--spec/javascripts/graphs/stat_graph_contributors_spec.js28
-rw-r--r--spec/javascripts/graphs/stat_graph_contributors_util_spec.js298
-rw-r--r--spec/javascripts/ide/components/jobs/stage_spec.js95
-rw-r--r--spec/javascripts/ide/components/repo_editor_spec.js4
-rw-r--r--spec/javascripts/ide/stores/actions/file_spec.js56
-rw-r--r--spec/javascripts/ide/stores/actions/merge_request_spec.js28
-rw-r--r--spec/javascripts/ide/stores/actions/tree_spec.js18
-rw-r--r--spec/javascripts/ide/stores/getters_spec.js55
-rw-r--r--spec/javascripts/ide/stores/modules/commit/actions_spec.js6
-rw-r--r--spec/javascripts/ide/stores/utils_spec.js30
-rw-r--r--spec/javascripts/issue_show/helpers.js11
-rw-r--r--spec/javascripts/lib/utils/tick_formats_spec.js40
-rw-r--r--spec/javascripts/merge_request_spec.js32
-rw-r--r--spec/javascripts/merge_request_tabs_spec.js14
-rw-r--r--spec/javascripts/monitoring/charts/heatmap_spec.js69
-rw-r--r--spec/javascripts/monitoring/components/dashboard_spec.js432
-rw-r--r--spec/javascripts/monitoring/mock_data.js1097
-rw-r--r--spec/javascripts/monitoring/panel_type_spec.js79
-rw-r--r--spec/javascripts/monitoring/shared/prometheus_header_spec.js26
-rw-r--r--spec/javascripts/monitoring/utils_spec.js38
-rw-r--r--spec/javascripts/notes/components/comment_form_spec.js301
-rw-r--r--spec/javascripts/notes/components/noteable_discussion_spec.js114
-rw-r--r--spec/javascripts/notes/mock_data.js1256
-rw-r--r--spec/javascripts/notes/stores/collapse_utils_spec.js10
-rw-r--r--spec/javascripts/pipelines/graph/action_component_spec.js81
-rw-r--r--spec/javascripts/raven/raven_config_spec.js254
-rw-r--r--spec/javascripts/search_autocomplete_spec.js68
-rw-r--r--spec/javascripts/sidebar/subscriptions_spec.js21
-rw-r--r--spec/javascripts/signin_tabs_memoizer_spec.js46
-rw-r--r--spec/javascripts/syntax_highlight_spec.js8
-rw-r--r--spec/javascripts/test_bundle.js33
-rw-r--r--spec/javascripts/u2f/mock_u2f_device.js18
-rw-r--r--spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js26
-rw-r--r--spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js74
-rw-r--r--spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js45
-rw-r--r--spec/javascripts/vue_shared/components/icon_spec.js13
-rw-r--r--spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js9
-rw-r--r--spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js120
-rw-r--r--spec/javascripts/vue_shared/components/user_popover/user_popover_spec.js167
-rw-r--r--spec/lib/api/helpers/pagination_spec.rb399
-rw-r--r--spec/lib/api/helpers_spec.rb14
-rw-r--r--spec/lib/backup/repository_spec.rb2
-rw-r--r--spec/lib/banzai/filter/asset_proxy_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb71
-rw-r--r--spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb48
-rw-r--r--spec/lib/banzai/filter/video_link_filter_spec.rb2
-rw-r--r--spec/lib/bitbucket/representation/pull_request_spec.rb1
-rw-r--r--spec/lib/container_registry/client_spec.rb12
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb92
-rw-r--r--spec/lib/gitlab/auth/ldap/auth_hash_spec.rb4
-rw-r--r--spec/lib/gitlab/auth/ldap/config_spec.rb19
-rw-r--r--spec/lib/gitlab/auth/ldap/person_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb15
-rw-r--r--spec/lib/gitlab/background_migration/legacy_uploads_migrator_spec.rb12
-rw-r--r--spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb2
-rw-r--r--spec/lib/gitlab/badge/pipeline/status_spec.rb2
-rw-r--r--spec/lib/gitlab/bare_repository_import/importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb1
-rw-r--r--spec/lib/gitlab/checks/lfs_integrity_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/ansi2json/style_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/ansi2json_spec.rb71
-rw-r--r--spec/lib/gitlab/ci/build/context/build_spec.rb26
-rw-r--r--spec/lib/gitlab/ci/build/context/global_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/build/policy/variables_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/build/rules/rule_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/build/rules_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/entry/artifacts_spec.rb86
-rw-r--r--spec/lib/gitlab/ci/config/entry/cache_spec.rb77
-rw-r--r--spec/lib/gitlab/ci/config/entry/commands_spec.rb67
-rw-r--r--spec/lib/gitlab/ci/config/entry/default_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/config/entry/files_spec.rb54
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb31
-rw-r--r--spec/lib/gitlab/ci/config/entry/key_spec.rb94
-rw-r--r--spec/lib/gitlab/ci/config/entry/need_spec.rb36
-rw-r--r--spec/lib/gitlab/ci/config/entry/needs_spec.rb84
-rw-r--r--spec/lib/gitlab/ci/config/entry/prefix_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb120
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules_spec.rb57
-rw-r--r--spec/lib/gitlab/ci/config/entry/script_spec.rb67
-rw-r--r--spec/lib/gitlab/ci/config/entry/workflow_spec.rb76
-rw-r--r--spec/lib/gitlab/ci/config/normalizer_spec.rb104
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/build_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb60
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb57
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb32
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb161
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb148
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb261
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb104
-rw-r--r--spec/lib/gitlab/ci/status/composite_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/trace/stream_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb338
-rw-r--r--spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb9
-rw-r--r--spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb29
-rw-r--r--spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb37
-rw-r--r--spec/lib/gitlab/cycle_analytics/events_spec.rb6
-rw-r--r--spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb16
-rw-r--r--spec/lib/gitlab/cycle_analytics/usage_data_spec.rb2
-rw-r--r--spec/lib/gitlab/danger/helper_spec.rb13
-rw-r--r--spec/lib/gitlab/danger/teammate_spec.rb18
-rw-r--r--spec/lib/gitlab/data_builder/deployment_spec.rb7
-rw-r--r--spec/lib/gitlab/data_builder/push_spec.rb26
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb1
-rw-r--r--spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb9
-rw-r--r--spec/lib/gitlab/devise_failure_spec.rb35
-rw-r--r--spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb2
-rw-r--r--spec/lib/gitlab/exclusive_lease_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/exclusive_lease_spec.rb2
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb261
-rw-r--r--spec/lib/gitlab/external_authorization/access_spec.rb2
-rw-r--r--spec/lib/gitlab/external_authorization/cache_spec.rb2
-rw-r--r--spec/lib/gitlab/external_authorization/client_spec.rb2
-rw-r--r--spec/lib/gitlab/external_authorization/logger_spec.rb2
-rw-r--r--spec/lib/gitlab/external_authorization/response_spec.rb2
-rw-r--r--spec/lib/gitlab/external_authorization_spec.rb2
-rw-r--r--spec/lib/gitlab/fake_application_settings_spec.rb2
-rw-r--r--spec/lib/gitlab/favicon_spec.rb2
-rw-r--r--spec/lib/gitlab/file_detector_spec.rb2
-rw-r--r--spec/lib/gitlab/file_finder_spec.rb6
-rw-r--r--spec/lib/gitlab/fogbugz_import/client_spec.rb2
-rw-r--r--spec/lib/gitlab/gfm/reference_rewriter_spec.rb2
-rw-r--r--spec/lib/gitlab/gfm/uploads_rewriter_spec.rb2
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb18
-rw-r--r--spec/lib/gitlab/git_access_spec.rb2
-rw-r--r--spec/lib/gitlab/git_access_wiki_spec.rb2
-rw-r--r--spec/lib/gitlab/git_ref_validator_spec.rb2
-rw-r--r--spec/lib/gitlab/git_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/blob_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/diff_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/health_check_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb8
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/remote_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/storage_settings_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/util_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/wiki_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/bulk_importing_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/caching_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/client_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/issues_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/labels_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/lfs_object_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/note_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/releases_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/repository_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/issuable_finder_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/label_finder_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/markdown_text_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/milestone_finder_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/page_counter_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/parallel_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/representation/diff_note_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/representation/issue_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/representation/note_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/representation/pull_request_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/representation/to_hash_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/representation/user_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/representation_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/sequential_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/user_finder_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import_spec.rb2
-rw-r--r--spec/lib/gitlab/gl_repository_spec.rb2
-rw-r--r--spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb10
-rw-r--r--spec/lib/gitlab/gpg_spec.rb98
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb51
-rw-r--r--spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb26
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb56
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb42
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb281
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/legacy_keyset_connection_spec.rb127
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb81
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb108
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset_connection_spec.rb117
-rw-r--r--spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb20
-rw-r--r--spec/lib/gitlab/group_search_results_spec.rb2
-rw-r--r--spec/lib/gitlab/hashed_storage/migrator_spec.rb8
-rw-r--r--spec/lib/gitlab/health_checks/master_check_spec.rb49
-rw-r--r--spec/lib/gitlab/highlight_spec.rb2
-rw-r--r--spec/lib/gitlab/http_io_spec.rb2
-rw-r--r--spec/lib/gitlab/http_spec.rb2
-rw-r--r--spec/lib/gitlab/i18n_spec.rb2
-rw-r--r--spec/lib/gitlab/identifier_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml13
-rw-r--r--spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/fork_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/group_project_object_builder_spec.rb36
-rw-r--r--spec/lib/gitlab/import_export/group_tree_saver_spec.rb180
-rw-r--r--spec/lib/gitlab/import_export/import_export_spec.rb6
-rw-r--r--spec/lib/gitlab/import_export/project_tree_restorer_spec.rb114
-rw-r--r--spec/lib/gitlab/import_export/project_tree_saver_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/relation_rename_service_spec.rb17
-rw-r--r--spec/lib/gitlab/import_export/relation_tree_saver_spec.rb42
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml13
-rw-r--r--spec/lib/gitlab/import_export/saver_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/shared_spec.rb2
-rw-r--r--spec/lib/gitlab/import_sources_spec.rb2
-rw-r--r--spec/lib/gitlab/incoming_email_spec.rb2
-rw-r--r--spec/lib/gitlab/insecure_key_fingerprint_spec.rb2
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb37
-rw-r--r--spec/lib/gitlab/issuable_metadata_spec.rb2
-rw-r--r--spec/lib/gitlab/issuable_sorter_spec.rb2
-rw-r--r--spec/lib/gitlab/issuables_count_for_state_spec.rb2
-rw-r--r--spec/lib/gitlab/job_waiter_spec.rb2
-rw-r--r--spec/lib/gitlab/json_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/kubernetes/config_maps/aws_node_auth_spec.rb33
-rw-r--r--spec/lib/gitlab/kubernetes/helm/install_command_spec.rb27
-rw-r--r--spec/lib/gitlab/kubernetes/helm/pod_spec.rb2
-rw-r--r--spec/lib/gitlab/kubernetes_spec.rb2
-rw-r--r--spec/lib/gitlab/language_detection_spec.rb2
-rw-r--r--spec/lib/gitlab/lazy_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/dashboard/finder_spec.rb4
-rw-r--r--spec/lib/gitlab/metrics/dashboard/processor_spec.rb8
-rw-r--r--spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb11
-rw-r--r--spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb106
-rw-r--r--spec/lib/gitlab/metrics/dashboard/url_spec.rb85
-rw-r--r--spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb76
-rw-r--r--spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb4
-rw-r--r--spec/lib/gitlab/pagination/offset_pagination_spec.rb215
-rw-r--r--spec/lib/gitlab/phabricator_import/project_creator_spec.rb2
-rw-r--r--spec/lib/gitlab/project_authorizations_spec.rb167
-rw-r--r--spec/lib/gitlab/project_search_results_spec.rb14
-rw-r--r--spec/lib/gitlab/project_template_spec.rb3
-rw-r--r--spec/lib/gitlab/prometheus/internal_spec.rb108
-rw-r--r--spec/lib/gitlab/prometheus/queries/knative_invocation_query_spec.rb13
-rw-r--r--spec/lib/gitlab/regex_spec.rb19
-rw-r--r--spec/lib/gitlab/search/found_blob_spec.rb24
-rw-r--r--spec/lib/gitlab/shell_spec.rb76
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/correlation_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/metrics_spec.rb113
-rw-r--r--spec/lib/gitlab/slash_commands/command_spec.rb5
-rw-r--r--spec/lib/gitlab/slash_commands/issue_comment_spec.rb117
-rw-r--r--spec/lib/gitlab/slash_commands/presenters/access_spec.rb10
-rw-r--r--spec/lib/gitlab/slash_commands/presenters/issue_comment_spec.rb37
-rw-r--r--spec/lib/gitlab/sourcegraph_spec.rb66
-rw-r--r--spec/lib/gitlab/sql/recursive_cte_spec.rb2
-rw-r--r--spec/lib/gitlab/sql/union_spec.rb4
-rw-r--r--spec/lib/gitlab/tracking_spec.rb19
-rw-r--r--spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb34
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb54
-rw-r--r--spec/lib/gitlab/user_access_spec.rb2
-rw-r--r--spec/lib/gitlab/utils/deep_size_spec.rb6
-rw-r--r--spec/lib/gitlab/visibility_level_checker_spec.rb2
-rw-r--r--spec/lib/gitlab/wiki_file_finder_spec.rb2
-rw-r--r--spec/lib/gitlab_spec.rb42
-rw-r--r--spec/lib/google_api/cloud_platform/client_spec.rb3
-rw-r--r--spec/lib/grafana/client_spec.rb26
-rw-r--r--spec/lib/omni_auth/strategies/saml_spec.rb2
-rw-r--r--spec/lib/prometheus/pid_provider_spec.rb12
-rw-r--r--spec/lib/quality/helm_client_spec.rb20
-rw-r--r--spec/lib/quality/kubernetes_client_spec.rb52
-rw-r--r--spec/lib/sentry/client_spec.rb9
-rw-r--r--spec/mailers/abuse_report_mailer_spec.rb2
-rw-r--r--spec/mailers/emails/merge_requests_spec.rb2
-rw-r--r--spec/mailers/emails/pages_domains_spec.rb2
-rw-r--r--spec/mailers/emails/profile_spec.rb2
-rw-r--r--spec/mailers/emails/releases_spec.rb1
-rw-r--r--spec/mailers/notify_spec.rb2
-rw-r--r--spec/mailers/repository_check_mailer_spec.rb2
-rw-r--r--spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb4
-rw-r--r--spec/migrations/active_record/schema_spec.rb2
-rw-r--r--spec/migrations/add_default_and_free_plans_spec.rb34
-rw-r--r--spec/migrations/add_foreign_keys_to_todos_spec.rb2
-rw-r--r--spec/migrations/add_not_null_constraint_to_project_mirror_data_foreign_key_spec.rb2
-rw-r--r--spec/migrations/add_pages_access_level_to_project_feature_spec.rb2
-rw-r--r--spec/migrations/add_pipeline_build_foreign_key_spec.rb2
-rw-r--r--spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb2
-rw-r--r--spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb2
-rw-r--r--spec/migrations/backfill_store_project_full_path_in_repo_spec.rb8
-rw-r--r--spec/migrations/change_default_value_for_dsa_key_restriction_spec.rb2
-rw-r--r--spec/migrations/cleanup_build_stage_migration_spec.rb2
-rw-r--r--spec/migrations/cleanup_environments_external_url_spec.rb2
-rw-r--r--spec/migrations/cleanup_stages_position_migration_spec.rb2
-rw-r--r--spec/migrations/create_missing_namespace_for_internal_users_spec.rb2
-rw-r--r--spec/migrations/drop_duplicate_protected_tags_spec.rb2
-rw-r--r--spec/migrations/enqueue_verify_pages_domain_workers_spec.rb2
-rw-r--r--spec/migrations/fill_empty_finished_at_in_deployments_spec.rb2
-rw-r--r--spec/migrations/fill_file_store_spec.rb4
-rw-r--r--spec/migrations/fill_productivity_analytics_start_date_spec.rb39
-rw-r--r--spec/migrations/fix_wrong_pages_access_level_spec.rb4
-rw-r--r--spec/migrations/generate_lets_encrypt_private_key_spec.rb2
-rw-r--r--spec/migrations/generate_missing_routes_spec.rb2
-rw-r--r--spec/migrations/migrate_cluster_configure_worker_sidekiq_queue_spec.rb2
-rw-r--r--spec/migrations/migrate_create_trace_artifact_sidekiq_queue_spec.rb2
-rw-r--r--spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb4
-rw-r--r--spec/migrations/migrate_object_storage_upload_sidekiq_queue_spec.rb2
-rw-r--r--spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb2
-rw-r--r--spec/migrations/migrate_update_head_pipeline_for_merge_request_sidekiq_queue_spec.rb2
-rw-r--r--spec/migrations/move_limits_from_plans_spec.rb37
-rw-r--r--spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb2
-rw-r--r--spec/migrations/remove_empty_github_service_templates_spec.rb55
-rw-r--r--spec/migrations/remove_redundant_pipeline_stages_spec.rb2
-rw-r--r--spec/migrations/reschedule_builds_stages_migration_spec.rb2
-rw-r--r--spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb2
-rw-r--r--spec/migrations/schedule_digest_personal_access_tokens_spec.rb4
-rw-r--r--spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb4
-rw-r--r--spec/migrations/schedule_runners_token_encryption_spec.rb2
-rw-r--r--spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb4
-rw-r--r--spec/migrations/schedule_stages_index_migration_spec.rb2
-rw-r--r--spec/migrations/schedule_sync_issuables_state_id_spec.rb4
-rw-r--r--spec/migrations/schedule_to_archive_legacy_traces_spec.rb4
-rw-r--r--spec/migrations/truncate_user_fullname_spec.rb2
-rw-r--r--spec/models/analytics/cycle_analytics/project_stage_spec.rb10
-rw-r--r--spec/models/application_setting_spec.rb79
-rw-r--r--spec/models/aws/role_spec.rb52
-rw-r--r--spec/models/ci/build_spec.rb139
-rw-r--r--spec/models/ci/build_trace_chunk_spec.rb54
-rw-r--r--spec/models/ci/pipeline_spec.rb319
-rw-r--r--spec/models/clusters/applications/cert_manager_spec.rb2
-rw-r--r--spec/models/clusters/applications/crossplane_spec.rb57
-rw-r--r--spec/models/clusters/applications/elastic_stack_spec.rb179
-rw-r--r--spec/models/clusters/applications/ingress_spec.rb38
-rw-r--r--spec/models/clusters/cluster_spec.rb160
-rw-r--r--spec/models/clusters/clusters_hierarchy_spec.rb40
-rw-r--r--spec/models/clusters/providers/aws_spec.rb62
-rw-r--r--spec/models/clusters/providers/gcp_spec.rb16
-rw-r--r--spec/models/commit_status_spec.rb4
-rw-r--r--spec/models/concerns/deployment_platform_spec.rb20
-rw-r--r--spec/models/concerns/from_union_spec.rb6
-rw-r--r--spec/models/concerns/issuable_spec.rb28
-rw-r--r--spec/models/concerns/noteable_spec.rb44
-rw-r--r--spec/models/concerns/redactable_spec.rb38
-rw-r--r--spec/models/concerns/subscribable_spec.rb56
-rw-r--r--spec/models/container_repository_spec.rb32
-rw-r--r--spec/models/deployment_merge_request_spec.rb14
-rw-r--r--spec/models/deployment_spec.rb80
-rw-r--r--spec/models/environment_spec.rb189
-rw-r--r--spec/models/environment_status_spec.rb2
-rw-r--r--spec/models/error_tracking/project_error_tracking_setting_spec.rb22
-rw-r--r--spec/models/evidence_spec.rb2
-rw-r--r--spec/models/grafana_integration_spec.rb31
-rw-r--r--spec/models/group_group_link_spec.rb36
-rw-r--r--spec/models/group_spec.rb122
-rw-r--r--spec/models/hooks/system_hook_spec.rb2
-rw-r--r--spec/models/issue_spec.rb13
-rw-r--r--spec/models/lfs_object_spec.rb12
-rw-r--r--spec/models/merge_request_diff_spec.rb8
-rw-r--r--spec/models/merge_request_spec.rb262
-rw-r--r--spec/models/milestone_spec.rb11
-rw-r--r--spec/models/namespace_spec.rb38
-rw-r--r--spec/models/personal_snippet_spec.rb19
-rw-r--r--spec/models/project_import_state_spec.rb2
-rw-r--r--spec/models/project_services/chat_message/pipeline_message_spec.rb39
-rw-r--r--spec/models/project_services/chat_message/push_message_spec.rb6
-rw-r--r--spec/models/project_services/data_fields_spec.rb18
-rw-r--r--spec/models/project_services/irker_service_spec.rb2
-rw-r--r--spec/models/project_services/prometheus_service_spec.rb31
-rw-r--r--spec/models/project_snippet_spec.rb21
-rw-r--r--spec/models/project_spec.rb82
-rw-r--r--spec/models/release_spec.rb20
-rw-r--r--spec/models/releases/source_spec.rb2
-rw-r--r--spec/models/remote_mirror_spec.rb2
-rw-r--r--spec/models/service_spec.rb20
-rw-r--r--spec/models/shard_spec.rb3
-rw-r--r--spec/models/snippet_spec.rb37
-rw-r--r--spec/models/spam_log_spec.rb2
-rw-r--r--spec/models/todo_spec.rb47
-rw-r--r--spec/models/user_spec.rb24
-rw-r--r--spec/models/wiki_page_spec.rb11
-rw-r--r--spec/models/zoom_meeting_spec.rb154
-rw-r--r--spec/policies/application_setting/term_policy_spec.rb2
-rw-r--r--spec/policies/base_policy_spec.rb43
-rw-r--r--spec/policies/ci/build_policy_spec.rb2
-rw-r--r--spec/policies/ci/pipeline_policy_spec.rb2
-rw-r--r--spec/policies/ci/pipeline_schedule_policy_spec.rb2
-rw-r--r--spec/policies/ci/trigger_policy_spec.rb2
-rw-r--r--spec/policies/clusters/cluster_policy_spec.rb2
-rw-r--r--spec/policies/deploy_key_policy_spec.rb2
-rw-r--r--spec/policies/deploy_token_policy_spec.rb2
-rw-r--r--spec/policies/environment_policy_spec.rb2
-rw-r--r--spec/policies/global_policy_spec.rb2
-rw-r--r--spec/policies/group_policy_spec.rb2
-rw-r--r--spec/policies/issuable_policy_spec.rb2
-rw-r--r--spec/policies/issue_policy_spec.rb2
-rw-r--r--spec/policies/merge_request_policy_spec.rb2
-rw-r--r--spec/policies/namespace_policy_spec.rb2
-rw-r--r--spec/policies/note_policy_spec.rb2
-rw-r--r--spec/policies/personal_snippet_policy_spec.rb32
-rw-r--r--spec/policies/project_policy_spec.rb27
-rw-r--r--spec/policies/project_snippet_policy_spec.rb2
-rw-r--r--spec/policies/protected_branch_policy_spec.rb2
-rw-r--r--spec/policies/resource_label_event_policy_spec.rb2
-rw-r--r--spec/policies/user_policy_spec.rb2
-rw-r--r--spec/presenters/ci/bridge_presenter_spec.rb2
-rw-r--r--spec/presenters/ci/build_presenter_spec.rb4
-rw-r--r--spec/presenters/ci/build_runner_presenter_spec.rb2
-rw-r--r--spec/presenters/ci/group_variable_presenter_spec.rb2
-rw-r--r--spec/presenters/ci/pipeline_presenter_spec.rb2
-rw-r--r--spec/presenters/ci/trigger_presenter_spec.rb2
-rw-r--r--spec/presenters/ci/variable_presenter_spec.rb2
-rw-r--r--spec/presenters/clusters/cluster_presenter_spec.rb2
-rw-r--r--spec/presenters/commit_status_presenter_spec.rb2
-rw-r--r--spec/presenters/conversational_development_index/metric_presenter_spec.rb2
-rw-r--r--spec/presenters/group_clusterable_presenter_spec.rb6
-rw-r--r--spec/presenters/group_member_presenter_spec.rb2
-rw-r--r--spec/presenters/instance_clusterable_presenter_spec.rb37
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb2
-rw-r--r--spec/presenters/project_clusterable_presenter_spec.rb6
-rw-r--r--spec/presenters/project_member_presenter_spec.rb2
-rw-r--r--spec/presenters/project_presenter_spec.rb62
-rw-r--r--spec/presenters/projects/settings/deploy_keys_presenter_spec.rb2
-rw-r--r--spec/presenters/release_presenter_spec.rb101
-rw-r--r--spec/requests/api/access_requests_spec.rb2
-rw-r--r--spec/requests/api/applications_spec.rb2
-rw-r--r--spec/requests/api/avatar_spec.rb2
-rw-r--r--spec/requests/api/award_emoji_spec.rb2
-rw-r--r--spec/requests/api/badges_spec.rb2
-rw-r--r--spec/requests/api/boards_spec.rb2
-rw-r--r--spec/requests/api/branches_spec.rb23
-rw-r--r--spec/requests/api/broadcast_messages_spec.rb2
-rw-r--r--spec/requests/api/commit_statuses_spec.rb6
-rw-r--r--spec/requests/api/commits_spec.rb45
-rw-r--r--spec/requests/api/deploy_keys_spec.rb2
-rw-r--r--spec/requests/api/deployments_spec.rb91
-rw-r--r--spec/requests/api/discussions_spec.rb2
-rw-r--r--spec/requests/api/doorkeeper_access_spec.rb2
-rw-r--r--spec/requests/api/environments_spec.rb2
-rw-r--r--spec/requests/api/events_spec.rb2
-rw-r--r--spec/requests/api/features_spec.rb17
-rw-r--r--spec/requests/api/files_spec.rb2
-rw-r--r--spec/requests/api/graphql/current_user/todos_query_spec.rb48
-rw-r--r--spec/requests/api/graphql/current_user_query_spec.rb33
-rw-r--r--spec/requests/api/graphql/gitlab_schema_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb134
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_labels_spec.rb108
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_locked_spec.rb79
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_milestone_spec.rb66
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb63
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_wip_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/todos/mark_done_spec.rb97
-rw-r--r--spec/requests/api/graphql/project/issues_spec.rb141
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb2
-rw-r--r--spec/requests/api/graphql/project_query_spec.rb2
-rw-r--r--spec/requests/api/group_boards_spec.rb2
-rw-r--r--spec/requests/api/group_clusters_spec.rb21
-rw-r--r--spec/requests/api/group_container_repositories_spec.rb10
-rw-r--r--spec/requests/api/group_export_spec.rb94
-rw-r--r--spec/requests/api/group_milestones_spec.rb2
-rw-r--r--spec/requests/api/group_variables_spec.rb2
-rw-r--r--spec/requests/api/groups_spec.rb2
-rw-r--r--spec/requests/api/helpers_spec.rb2
-rw-r--r--spec/requests/api/import_github_spec.rb2
-rw-r--r--spec/requests/api/internal/base_spec.rb12
-rw-r--r--spec/requests/api/jobs_spec.rb10
-rw-r--r--spec/requests/api/keys_spec.rb2
-rw-r--r--spec/requests/api/labels_spec.rb2
-rw-r--r--spec/requests/api/lint_spec.rb2
-rw-r--r--spec/requests/api/markdown_spec.rb2
-rw-r--r--spec/requests/api/members_spec.rb16
-rw-r--r--spec/requests/api/merge_request_diffs_spec.rb2
-rw-r--r--spec/requests/api/merge_requests_spec.rb75
-rw-r--r--spec/requests/api/namespaces_spec.rb2
-rw-r--r--spec/requests/api/notes_spec.rb2
-rw-r--r--spec/requests/api/notification_settings_spec.rb2
-rw-r--r--spec/requests/api/oauth_tokens_spec.rb2
-rw-r--r--spec/requests/api/pages/internal_access_spec.rb2
-rw-r--r--spec/requests/api/pages/private_access_spec.rb2
-rw-r--r--spec/requests/api/pages/public_access_spec.rb2
-rw-r--r--spec/requests/api/pages_domains_spec.rb96
-rw-r--r--spec/requests/api/pipeline_schedules_spec.rb2
-rw-r--r--spec/requests/api/pipelines_spec.rb2
-rw-r--r--spec/requests/api/project_clusters_spec.rb21
-rw-r--r--spec/requests/api/project_container_repositories_spec.rb9
-rw-r--r--spec/requests/api/project_events_spec.rb2
-rw-r--r--spec/requests/api/project_export_spec.rb4
-rw-r--r--spec/requests/api/project_hooks_spec.rb2
-rw-r--r--spec/requests/api/project_import_spec.rb4
-rw-r--r--spec/requests/api/project_milestones_spec.rb2
-rw-r--r--spec/requests/api/project_snapshots_spec.rb2
-rw-r--r--spec/requests/api/project_snippets_spec.rb2
-rw-r--r--spec/requests/api/project_templates_spec.rb2
-rw-r--r--spec/requests/api/projects_spec.rb119
-rw-r--r--spec/requests/api/protected_branches_spec.rb2
-rw-r--r--spec/requests/api/protected_tags_spec.rb2
-rw-r--r--spec/requests/api/releases_spec.rb2
-rw-r--r--spec/requests/api/repositories_spec.rb2
-rw-r--r--spec/requests/api/runner_spec.rb6
-rw-r--r--spec/requests/api/runners_spec.rb2
-rw-r--r--spec/requests/api/search_spec.rb3
-rw-r--r--spec/requests/api/services_spec.rb6
-rw-r--r--spec/requests/api/settings_spec.rb82
-rw-r--r--spec/requests/api/sidekiq_metrics_spec.rb6
-rw-r--r--spec/requests/api/snippets_spec.rb2
-rw-r--r--spec/requests/api/system_hooks_spec.rb2
-rw-r--r--spec/requests/api/tags_spec.rb2
-rw-r--r--spec/requests/api/templates_spec.rb2
-rw-r--r--spec/requests/api/todos_spec.rb2
-rw-r--r--spec/requests/api/triggers_spec.rb2
-rw-r--r--spec/requests/api/users_spec.rb34
-rw-r--r--spec/requests/api/variables_spec.rb2
-rw-r--r--spec/requests/api/version_spec.rb2
-rw-r--r--spec/requests/api/wikis_spec.rb2
-rw-r--r--spec/requests/git_http_spec.rb36
-rw-r--r--spec/requests/groups/milestones_controller_spec.rb2
-rw-r--r--spec/requests/groups/registry/repositories_controller_spec.rb36
-rw-r--r--spec/requests/health_controller_spec.rb227
-rw-r--r--spec/requests/jwt_controller_spec.rb2
-rw-r--r--spec/requests/lfs_locks_api_spec.rb2
-rw-r--r--spec/requests/oauth_tokens_spec.rb2
-rw-r--r--spec/requests/openid_connect_spec.rb2
-rw-r--r--spec/requests/projects/cycle_analytics_events_spec.rb10
-rw-r--r--spec/requests/rack_attack_global_spec.rb64
-rw-r--r--spec/requests/request_profiler_spec.rb2
-rw-r--r--spec/routing/admin_routing_spec.rb2
-rw-r--r--spec/routing/environments_spec.rb2
-rw-r--r--spec/routing/group_routing_spec.rb2
-rw-r--r--spec/routing/import_routing_spec.rb2
-rw-r--r--spec/routing/notifications_routing_spec.rb2
-rw-r--r--spec/routing/openid_connect_spec.rb2
-rw-r--r--spec/routing/project_routing_spec.rb8
-rw-r--r--spec/routing/routing_spec.rb29
-rw-r--r--spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb2
-rw-r--r--spec/rubocop/cop/avoid_return_from_blocks_spec.rb2
-rw-r--r--spec/rubocop/cop/destroy_all_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/httparty_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/predicate_memoization_spec.rb2
-rw-r--r--spec/rubocop/cop/group_public_or_visible_to_user_spec.rb2
-rw-r--r--spec/rubocop/cop/include_sidekiq_worker_spec.rb2
-rw-r--r--spec/rubocop/cop/line_break_around_conditional_block_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_concurrent_index_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_reference_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_timestamps_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/datetime_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/hash_index_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/remove_column_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/remove_concurrent_index_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/remove_index_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/reversible_add_column_with_default_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/safer_boolean_column_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/timestamps_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/update_column_in_batches_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/update_large_table_spec.rb2
-rw-r--r--spec/rubocop/cop/project_path_helper_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/any_instance_of_spec.rb61
-rw-r--r--spec/rubocop/cop/rspec/env_assignment_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb2
-rw-r--r--spec/rubocop/cop/sidekiq_options_queue_spec.rb2
-rw-r--r--spec/serializers/blob_entity_spec.rb12
-rw-r--r--spec/serializers/diff_file_base_entity_spec.rb15
-rw-r--r--spec/serializers/diff_file_entity_spec.rb7
-rw-r--r--spec/serializers/issuable_sidebar_extras_entity_spec.rb20
-rw-r--r--spec/serializers/job_artifact_report_entity_spec.rb2
-rw-r--r--spec/serializers/merge_request_diff_entity_spec.rb19
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb22
-rw-r--r--spec/serializers/pipeline_details_entity_spec.rb2
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb2
-rw-r--r--spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb5
-rw-r--r--spec/services/ci/cancel_user_pipelines_service_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/cache_spec.rb168
-rw-r--r--spec/services/ci/create_pipeline_service/rules_spec.rb272
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb114
-rw-r--r--spec/services/ci/find_exposed_artifacts_service_spec.rb147
-rw-r--r--spec/services/ci/process_pipeline_service_spec.rb22
-rw-r--r--spec/services/ci/register_job_service_spec.rb51
-rw-r--r--spec/services/clusters/applications/create_service_spec.rb28
-rw-r--r--spec/services/clusters/aws/fetch_credentials_service_spec.rb68
-rw-r--r--spec/services/clusters/aws/finalize_creation_service_spec.rb124
-rw-r--r--spec/services/clusters/aws/provision_service_spec.rb131
-rw-r--r--spec/services/clusters/aws/proxy_service_spec.rb210
-rw-r--r--spec/services/clusters/aws/verify_provision_status_service_spec.rb76
-rw-r--r--spec/services/clusters/destroy_service_spec.rb56
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb2
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb45
-rw-r--r--spec/services/clusters/update_service_spec.rb127
-rw-r--r--spec/services/concerns/merge_requests/assigns_merge_params_spec.rb2
-rw-r--r--spec/services/create_branch_service_spec.rb15
-rw-r--r--spec/services/deployments/after_create_service_spec.rb34
-rw-r--r--spec/services/deployments/link_merge_requests_service_spec.rb121
-rw-r--r--spec/services/deployments/update_service_spec.rb52
-rw-r--r--spec/services/error_tracking/issue_details_service_spec.rb48
-rw-r--r--spec/services/error_tracking/issue_latest_event_service_spec.rb48
-rw-r--r--spec/services/error_tracking/list_issues_service_spec.rb91
-rw-r--r--spec/services/error_tracking/list_projects_service_spec.rb2
-rw-r--r--spec/services/git/branch_hooks_service_spec.rb2
-rw-r--r--spec/services/git/branch_push_service_spec.rb20
-rw-r--r--spec/services/groups/destroy_service_spec.rb18
-rw-r--r--spec/services/groups/group_links/create_service_spec.rb119
-rw-r--r--spec/services/groups/group_links/destroy_service_spec.rb63
-rw-r--r--spec/services/groups/import_export/export_service_spec.rb55
-rw-r--r--spec/services/groups/transfer_service_spec.rb30
-rw-r--r--spec/services/groups/update_service_spec.rb61
-rw-r--r--spec/services/import_export_clean_up_service_spec.rb2
-rw-r--r--spec/services/issues/close_service_spec.rb6
-rw-r--r--spec/services/issues/update_service_spec.rb40
-rw-r--r--spec/services/issues/zoom_link_service_spec.rb162
-rw-r--r--spec/services/members/destroy_service_spec.rb2
-rw-r--r--spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb4
-rw-r--r--spec/services/merge_requests/build_service_spec.rb34
-rw-r--r--spec/services/merge_requests/close_service_spec.rb2
-rw-r--r--spec/services/merge_requests/create_from_issue_service_spec.rb24
-rw-r--r--spec/services/merge_requests/create_service_spec.rb6
-rw-r--r--spec/services/merge_requests/ff_merge_service_spec.rb57
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb106
-rw-r--r--spec/services/merge_requests/merge_to_ref_service_spec.rb8
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb2
-rw-r--r--spec/services/merge_requests/rebase_service_spec.rb2
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb80
-rw-r--r--spec/services/merge_requests/reopen_service_spec.rb2
-rw-r--r--spec/services/merge_requests/resolved_discussion_notification_service_spec.rb2
-rw-r--r--spec/services/merge_requests/update_service_spec.rb20
-rw-r--r--spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb177
-rw-r--r--spec/services/metrics/dashboard/project_dashboard_service_spec.rb3
-rw-r--r--spec/services/metrics/dashboard/system_dashboard_service_spec.rb3
-rw-r--r--spec/services/namespaces/statistics_refresher_service_spec.rb2
-rw-r--r--spec/services/notification_service_spec.rb25
-rw-r--r--spec/services/projects/after_rename_service_spec.rb2
-rw-r--r--spec/services/projects/container_repository/delete_tags_service_spec.rb47
-rw-r--r--spec/services/projects/destroy_service_spec.rb18
-rw-r--r--spec/services/projects/fork_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/base_attachment_service_spec.rb56
-rw-r--r--spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb32
-rw-r--r--spec/services/projects/hashed_storage/migrate_repository_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/migration_service_spec.rb22
-rw-r--r--spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/rollback_repository_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/rollback_service_spec.rb17
-rw-r--r--spec/services/projects/import_export/export_service_spec.rb2
-rw-r--r--spec/services/projects/lfs_pointers/lfs_link_service_spec.rb30
-rw-r--r--spec/services/projects/update_service_spec.rb4
-rw-r--r--spec/services/system_note_service_spec.rb252
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb2
-rw-r--r--spec/services/system_notes/merge_requests_service_spec.rb243
-rw-r--r--spec/services/users/signup_service_spec.rb64
-rw-r--r--spec/services/zoom_notes_service_spec.rb81
-rw-r--r--spec/sidekiq/cron/job_gem_dependency_spec.rb2
-rw-r--r--spec/spec_helper.rb5
-rw-r--r--spec/support/capybara.rb6
-rw-r--r--spec/support/controllers/ldap_omniauth_callbacks_controller_shared_context.rb8
-rw-r--r--spec/support/cycle_analytics_helpers/test_generation.rb2
-rwxr-xr-xspec/support/generate-seed-repo-rb7
-rw-r--r--spec/support/helpers/access_matchers_helpers.rb95
-rw-r--r--spec/support/helpers/cycle_analytics_helpers.rb2
-rw-r--r--spec/support/helpers/filtered_search_helpers.rb4
-rw-r--r--spec/support/helpers/grafana_api_helpers.rb41
-rw-r--r--spec/support/helpers/graphql_helpers.rb21
-rw-r--r--spec/support/helpers/kubernetes_helpers.rb220
-rw-r--r--spec/support/helpers/login_helpers.rb2
-rw-r--r--spec/support/helpers/smime_helper.rb2
-rw-r--r--spec/support/helpers/stub_experiments.rb14
-rw-r--r--spec/support/helpers/stub_gitlab_calls.rb9
-rw-r--r--spec/support/helpers/test_env.rb41
-rw-r--r--spec/support/import_export/common_util.rb7
-rw-r--r--spec/support/matchers/access_matchers_for_request.rb53
-rw-r--r--spec/support/matchers/access_matchers_generic.rb66
-rw-r--r--spec/support/matchers/db_schema_matchers.rb32
-rwxr-xr-xspec/support/prepare-gitlab-git-test-for-commit1
-rw-r--r--spec/support/shared_examples/ci/auto_merge_merge_requests_examples.rb40
-rw-r--r--spec/support/shared_examples/container_repositories_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/cycle_analytics_event_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/cycle_analytics_stage_shared_examples.rb137
-rw-r--r--spec/support/shared_examples/features/archive_download_buttons_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/file_finder.rb10
-rw-r--r--spec/support/shared_examples/graphql/connection_paged_nodes.rb28
-rw-r--r--spec/support/shared_examples/graphql/sort_enum_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/lib/gitlab/ci/config/entry/key_validations_shared_examples.rb81
-rw-r--r--spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb105
-rw-r--r--spec/support/shared_examples/merge_requests_rendering_a_single_diff_version.rb17
-rw-r--r--spec/support/shared_examples/models/cluster_application_helm_cert_examples.rb4
-rw-r--r--spec/support/shared_examples/models/concerns/issuable_shared_examples.rb (renamed from spec/support/shared_examples/models/concern/issuable_shared_examples.rb)0
-rw-r--r--spec/support/shared_examples/models/concerns/redactable_shared_examples.rb39
-rw-r--r--spec/support/shared_examples/models/with_uploads_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/issue/zoom_quick_actions_shared_examples.rb43
-rw-r--r--spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/discussions.rb23
-rw-r--r--spec/support/shared_examples/requests/api/notes.rb2
-rw-r--r--spec/support/shared_examples/requests/rack_attack_shared_examples.rb118
-rw-r--r--spec/support/shared_examples/serializers/diff_file_entity_examples.rb33
-rw-r--r--spec/support/shared_examples/services/error_tracking_service_shared_examples.rb89
-rw-r--r--spec/support/shared_examples/updating_mentions_shared_examples.rb26
-rw-r--r--spec/support/sidekiq.rb21
-rwxr-xr-xspec/support/unpack-gitlab-git-test6
-rw-r--r--spec/tasks/gitlab/shell_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/task_helpers_spec.rb27
-rw-r--r--spec/uploaders/workers/object_storage/background_move_worker_spec.rb8
-rw-r--r--spec/views/admin/application_settings/integrations.html.haml_spec.rb34
-rw-r--r--spec/views/devise/sessions/new.html.haml_spec.rb71
-rw-r--r--spec/views/layouts/_head.html.haml_spec.rb2
-rw-r--r--spec/views/profiles/preferences/show.html.haml_spec.rb72
-rw-r--r--spec/views/profiles/show.html.haml_spec.rb1
-rw-r--r--spec/views/projects/clusters/clusters/gcp/_form.html.haml_spec.rb38
-rw-r--r--spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb2
-rw-r--r--spec/views/projects/merge_requests/_commits.html.haml_spec.rb2
-rw-r--r--spec/views/projects/pages_domains/show.html.haml_spec.rb34
-rw-r--r--spec/views/projects/show.html.haml_spec.rb41
-rw-r--r--spec/views/projects/tree/_tree_header.html.haml_spec.rb2
-rw-r--r--spec/views/projects/tree/show.html.haml_spec.rb48
-rw-r--r--spec/workers/cluster_provision_worker_spec.rb13
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb37
-rw-r--r--spec/workers/expire_build_artifacts_worker_spec.rb51
-rw-r--r--spec/workers/group_export_worker_spec.rb29
-rw-r--r--spec/workers/hashed_storage/migrator_worker_spec.rb2
-rw-r--r--spec/workers/hashed_storage/rollbacker_worker_spec.rb2
-rw-r--r--spec/workers/merge_worker_spec.rb1
-rw-r--r--spec/workers/new_note_worker_spec.rb19
-rw-r--r--spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb2
-rw-r--r--spec/workers/pipeline_schedule_worker_spec.rb2
-rw-r--r--spec/workers/process_commit_worker_spec.rb3
-rw-r--r--spec/workers/project_cache_worker_spec.rb2
-rw-r--r--spec/workers/remove_expired_group_links_worker_spec.rb57
-rw-r--r--spec/workers/repository_check/single_repository_worker_spec.rb8
-rw-r--r--spec/workers/stuck_ci_jobs_worker_spec.rb21
-rw-r--r--spec/workers/stuck_merge_jobs_worker_spec.rb2
-rw-r--r--spec/workers/wait_for_cluster_creation_worker_spec.rb15
1153 files changed, 33553 insertions, 10547 deletions
diff --git a/spec/controllers/abuse_reports_controller_spec.rb b/spec/controllers/abuse_reports_controller_spec.rb
index e360ab68cf2..e573ef4be49 100644
--- a/spec/controllers/abuse_reports_controller_spec.rb
+++ b/spec/controllers/abuse_reports_controller_spec.rb
@@ -49,7 +49,9 @@ describe AbuseReportsController do
end
it 'calls notify' do
- expect_any_instance_of(AbuseReport).to receive(:notify)
+ expect_next_instance_of(AbuseReport) do |instance|
+ expect(instance).to receive(:notify)
+ end
post :create, params: { abuse_report: attrs }
end
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index 233710b9fc3..ebae931764d 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -73,7 +73,7 @@ describe Admin::ClustersController do
end
describe 'GET #new' do
- def get_new(provider: 'gke')
+ def get_new(provider: 'gcp')
get :new, params: { provider: provider }
end
@@ -227,16 +227,17 @@ describe Admin::ClustersController do
describe 'security' do
before do
- allow_any_instance_of(described_class)
- .to receive(:token_in_session).and_return('token')
- allow_any_instance_of(described_class)
- .to receive(:expires_at_in_session).and_return(1.hour.since.to_i.to_s)
- allow_any_instance_of(GoogleApi::CloudPlatform::Client)
- .to receive(:projects_zones_clusters_create) do
- OpenStruct.new(
- self_link: 'projects/gcp-project-12345/zones/us-central1-a/operations/ope-123',
- status: 'RUNNING'
- )
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:token_in_session).and_return('token')
+ allow(instance).to receive(:expires_at_in_session).and_return(1.hour.since.to_i.to_s)
+ end
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance|
+ allow(instance).to receive(:projects_zones_clusters_create) do
+ OpenStruct.new(
+ self_link: 'projects/gcp-project-12345/zones/us-central1-a/operations/ope-123',
+ status: 'RUNNING'
+ )
+ end
end
allow(WaitForClusterCreationWorker).to receive(:perform_in).and_return(nil)
@@ -248,6 +249,69 @@ describe Admin::ClustersController do
end
end
+ describe 'POST #create_aws' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ provider_aws_attributes: {
+ key_name: 'key',
+ role_arn: 'arn:role',
+ region: 'region',
+ vpc_id: 'vpc',
+ instance_type: 'instance type',
+ num_nodes: 3,
+ security_group_id: 'security group',
+ subnet_ids: %w(subnet1 subnet2)
+ }
+ }
+ }
+ end
+
+ def post_create_aws
+ post :create_aws, params: params
+ end
+
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { post_create_aws }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Providers::Aws.count }
+
+ cluster = Clusters::Cluster.instance_type.first
+
+ expect(response.status).to eq(201)
+ expect(response.location).to eq(admin_cluster_path(cluster))
+ expect(cluster).to be_aws
+ expect(cluster).to be_kubernetes
+ end
+
+ context 'params are invalid' do
+ let(:params) do
+ {
+ cluster: { name: '' }
+ }
+ end
+
+ it 'does not create a cluster' do
+ expect { post_create_aws }.not_to change { Clusters::Cluster.count }
+
+ expect(response.status).to eq(422)
+ expect(response.content_type).to eq('application/json')
+ expect(response.body).to include('is invalid')
+ end
+ end
+
+ describe 'security' do
+ before do
+ allow(WaitForClusterCreationWorker).to receive(:perform_in)
+ end
+
+ it { expect { post_create_aws }.to be_allowed_for(:admin) }
+ it { expect { post_create_aws }.to be_denied_for(:user) }
+ it { expect { post_create_aws }.to be_denied_for(:external) }
+ end
+ end
+
describe 'POST #create_user' do
let(:params) do
{
@@ -318,6 +382,72 @@ describe Admin::ClustersController do
end
end
+ describe 'POST authorize AWS role for EKS cluster' do
+ let(:role_arn) { 'arn:aws:iam::123456789012:role/role-name' }
+ let(:role_external_id) { '12345' }
+
+ let(:params) do
+ {
+ cluster: {
+ role_arn: role_arn,
+ role_external_id: role_external_id
+ }
+ }
+ end
+
+ def go
+ post :authorize_aws_role, params: params
+ end
+
+ it 'creates an Aws::Role record' do
+ expect { go }.to change { Aws::Role.count }
+
+ expect(response.status).to eq 201
+
+ role = Aws::Role.last
+ expect(role.user).to eq admin
+ expect(role.role_arn).to eq role_arn
+ expect(role.role_external_id).to eq role_external_id
+ end
+
+ context 'role cannot be created' do
+ let(:role_arn) { 'invalid-role' }
+
+ it 'does not create a record' do
+ expect { go }.not_to change { Aws::Role.count }
+
+ expect(response.status).to eq 422
+ end
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'DELETE revoke AWS role for EKS cluster' do
+ let!(:role) { create(:aws_role, user: admin) }
+
+ def go
+ delete :revoke_aws_role
+ end
+
+ it 'deletes the Aws::Role record' do
+ expect { go }.to change { Aws::Role.count }
+
+ expect(response.status).to eq 204
+ expect(admin.reload_aws_role).to be_nil
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+ end
+
describe 'GET #cluster_status' do
let(:cluster) { create(:cluster, :providing_by_gcp, :instance) }
@@ -338,7 +468,9 @@ describe Admin::ClustersController do
end
it 'invokes schedule_status_update on each application' do
- expect_any_instance_of(Clusters::Applications::Ingress).to receive(:schedule_status_update)
+ expect_next_instance_of(Clusters::Applications::Ingress) do |instance|
+ expect(instance).to receive(:schedule_status_update)
+ end
get_cluster_status
end
diff --git a/spec/controllers/admin/identities_controller_spec.rb b/spec/controllers/admin/identities_controller_spec.rb
index 68695afdb61..256aafe09f8 100644
--- a/spec/controllers/admin/identities_controller_spec.rb
+++ b/spec/controllers/admin/identities_controller_spec.rb
@@ -13,7 +13,9 @@ describe Admin::IdentitiesController do
let(:user) { create(:omniauth_user, provider: 'ldapmain', extern_uid: 'uid=myuser,ou=people,dc=example,dc=com') }
it 'repairs ldap blocks' do
- expect_any_instance_of(RepairLdapBlockedUserService).to receive(:execute)
+ expect_next_instance_of(RepairLdapBlockedUserService) do |instance|
+ expect(instance).to receive(:execute)
+ end
put :update, params: { user_id: user.username, id: user.ldap_identity.id, identity: { provider: 'twitter' } }
end
@@ -23,7 +25,9 @@ describe Admin::IdentitiesController do
let(:user) { create(:omniauth_user, provider: 'ldapmain', extern_uid: 'uid=myuser,ou=people,dc=example,dc=com') }
it 'repairs ldap blocks' do
- expect_any_instance_of(RepairLdapBlockedUserService).to receive(:execute)
+ expect_next_instance_of(RepairLdapBlockedUserService) do |instance|
+ expect(instance).to receive(:execute)
+ end
delete :destroy, params: { user_id: user.username, id: user.ldap_identity.id }
end
diff --git a/spec/controllers/admin/spam_logs_controller_spec.rb b/spec/controllers/admin/spam_logs_controller_spec.rb
index 3bc49023357..baf4216dcde 100644
--- a/spec/controllers/admin/spam_logs_controller_spec.rb
+++ b/spec/controllers/admin/spam_logs_controller_spec.rb
@@ -27,7 +27,7 @@ describe Admin::SpamLogsController do
expect(response).to have_gitlab_http_status(200)
end
- it 'removes user and his spam logs when removing the user' do
+ it 'removes user and his spam logs when removing the user', :sidekiq_might_not_need_inline do
delete :destroy, params: { id: first_spam.id, remove_user: true }
expect(flash[:notice]).to eq "User #{user.username} was successfully removed."
@@ -39,7 +39,9 @@ describe Admin::SpamLogsController do
describe '#mark_as_ham' do
before do
- allow_any_instance_of(AkismetService).to receive(:submit_ham).and_return(true)
+ allow_next_instance_of(AkismetService) do |instance|
+ allow(instance).to receive(:submit_ham).and_return(true)
+ end
end
it 'submits the log as ham' do
post :mark_as_ham, params: { id: first_spam.id }
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index afe21c8b34a..50ba7418d2c 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -35,7 +35,7 @@ describe Admin::UsersController do
end
end
- describe 'DELETE #user with projects' do
+ describe 'DELETE #user with projects', :sidekiq_might_not_need_inline do
let(:project) { create(:project, namespace: user.namespace) }
let!(:issue) { create(:issue, author: user) }
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 993e4020a75..4a10e7b5325 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -96,30 +96,14 @@ describe ApplicationController do
request.path = '/-/peek'
end
- # TODO:
- # remove line below once `privacy_policy_update_callout`
- # feature flag is removed and `gon` reverts back to
- # to not setting any variables.
- if Gitlab.ee?
- it_behaves_like 'setting gon variables'
- else
- it_behaves_like 'not setting gon variables'
- end
+ it_behaves_like 'not setting gon variables'
end
end
context 'with json format' do
let(:format) { :json }
- # TODO:
- # remove line below once `privacy_policy_update_callout`
- # feature flag is removed and `gon` reverts back to
- # to not setting any variables.
- if Gitlab.ee?
- it_behaves_like 'setting gon variables'
- else
- it_behaves_like 'not setting gon variables'
- end
+ it_behaves_like 'not setting gon variables'
end
end
@@ -655,7 +639,7 @@ describe ApplicationController do
context 'given a 422 error page' do
controller do
def index
- render 'errors/omniauth_error', layout: 'errors', status: 422
+ render 'errors/omniauth_error', layout: 'errors', status: :unprocessable_entity
end
end
@@ -669,7 +653,7 @@ describe ApplicationController do
context 'given a 500 error page' do
controller do
def index
- render 'errors/omniauth_error', layout: 'errors', status: 500
+ render 'errors/omniauth_error', layout: 'errors', status: :internal_server_error
end
end
@@ -683,7 +667,7 @@ describe ApplicationController do
context 'given a 200 success page' do
controller do
def index
- render 'errors/omniauth_error', layout: 'errors', status: 200
+ render 'errors/omniauth_error', layout: 'errors', status: :ok
end
end
@@ -843,7 +827,7 @@ describe ApplicationController do
end
end
- describe '#require_role' do
+ describe '#required_signup_info' do
controller(described_class) do
def index; end
end
@@ -852,7 +836,7 @@ describe ApplicationController do
let(:experiment_enabled) { true }
before do
- stub_experiment(signup_flow: experiment_enabled)
+ stub_experiment_for_user(signup_flow: experiment_enabled)
end
context 'experiment enabled and user with required role' do
@@ -865,7 +849,7 @@ describe ApplicationController do
it { is_expected.to redirect_to users_sign_up_welcome_path }
end
- context 'experiment enabled and user without a role' do
+ context 'experiment enabled and user without a required role' do
before do
sign_in(user)
get :index
@@ -874,7 +858,7 @@ describe ApplicationController do
it { is_expected.not_to redirect_to users_sign_up_welcome_path }
end
- context 'experiment disabled and user with required role' do
+ context 'experiment disabled' do
let(:experiment_enabled) { false }
before do
diff --git a/spec/controllers/concerns/confirm_email_warning_spec.rb b/spec/controllers/concerns/confirm_email_warning_spec.rb
index 0c598a360af..25429cdd149 100644
--- a/spec/controllers/concerns/confirm_email_warning_spec.rb
+++ b/spec/controllers/concerns/confirm_email_warning_spec.rb
@@ -19,7 +19,7 @@ describe ConfirmEmailWarning do
RSpec::Matchers.define :set_confirm_warning_for do |email|
match do |response|
- expect(response).to set_flash.now[:warning].to include("Please check your email (#{email}) to verify that you own this address.")
+ expect(response).to set_flash.now[:warning].to include("Please check your email (#{email}) to verify that you own this address and unlock the power of CI/CD.")
end
end
diff --git a/spec/controllers/concerns/metrics_dashboard_spec.rb b/spec/controllers/concerns/metrics_dashboard_spec.rb
index a71e34fd1ca..ff2b6fbb8ec 100644
--- a/spec/controllers/concerns/metrics_dashboard_spec.rb
+++ b/spec/controllers/concerns/metrics_dashboard_spec.rb
@@ -3,9 +3,11 @@
require 'spec_helper'
describe MetricsDashboard do
+ include MetricsDashboardHelpers
+
describe 'GET #metrics_dashboard' do
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { project_with_dashboard('.gitlab/dashboards/test.yml') }
let_it_be(:environment) { create(:environment, project: project) }
before do
@@ -31,11 +33,13 @@ describe MetricsDashboard do
end
context 'when params are provided' do
+ let(:params) { { environment: environment } }
+
before do
allow(controller).to receive(:project).and_return(project)
allow(controller)
.to receive(:metrics_dashboard_params)
- .and_return(environment: environment)
+ .and_return(params)
end
it 'returns the specified dashboard' do
@@ -43,6 +47,15 @@ describe MetricsDashboard do
expect(json_response).not_to have_key('all_dashboards')
end
+ context 'when the params are in an alternate format' do
+ let(:params) { ActionController::Parameters.new({ environment: environment }).permit! }
+
+ it 'returns the specified dashboard' do
+ expect(json_response['dashboard']['dashboard']).to eq('Environment metrics')
+ expect(json_response).not_to have_key('all_dashboards')
+ end
+ end
+
context 'when parameters are provided and the list of all dashboards is required' do
before do
allow(controller).to receive(:include_all_dashboards?).and_return(true)
@@ -52,6 +65,36 @@ describe MetricsDashboard do
expect(json_response['dashboard']['dashboard']).to eq('Environment metrics')
expect(json_response).to have_key('all_dashboards')
end
+
+ context 'in all_dashboard list' do
+ let(:system_dashboard) { json_response['all_dashboards'].find { |dashboard| dashboard["system_dashboard"] == true } }
+ let(:project_dashboard) { json_response['all_dashboards'].find { |dashboard| dashboard["system_dashboard"] == false } }
+
+ it 'includes project_blob_path only for project dashboards' do
+ expect(system_dashboard['project_blob_path']).to be_nil
+ expect(project_dashboard['project_blob_path']).to eq("/#{project.namespace.path}/#{project.name}/blob/master/.gitlab/dashboards/test.yml")
+ end
+
+ describe 'project permissions' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:can_collaborate, :system_can_edit, :project_can_edit) do
+ false | false | false
+ true | false | true
+ end
+
+ with_them do
+ before do
+ allow(controller).to receive(:can_collaborate_with_project?).and_return(can_collaborate)
+ end
+
+ it "sets can_edit appropriately" do
+ expect(system_dashboard["can_edit"]).to eq(system_can_edit)
+ expect(project_dashboard["can_edit"]).to eq(project_can_edit)
+ end
+ end
+ end
+ end
end
end
end
diff --git a/spec/controllers/concerns/redirects_for_missing_path_on_tree_spec.rb b/spec/controllers/concerns/redirects_for_missing_path_on_tree_spec.rb
new file mode 100644
index 00000000000..903100ba93f
--- /dev/null
+++ b/spec/controllers/concerns/redirects_for_missing_path_on_tree_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe RedirectsForMissingPathOnTree, type: :controller do
+ controller(ActionController::Base) do
+ include Gitlab::Routing.url_helpers
+ include RedirectsForMissingPathOnTree
+
+ def fake
+ redirect_to_tree_root_for_missing_path(Project.find(params[:project_id]), params[:ref], params[:file_path])
+ end
+ end
+
+ let(:project) { create(:project) }
+
+ before do
+ routes.draw { get 'fake' => 'anonymous#fake' }
+ end
+
+ describe '#redirect_to_root_path' do
+ it 'redirects to the tree path with a notice' do
+ long_file_path = ('a/b/' * 30) + 'foo.txt'
+ truncated_file_path = '...b/' + ('a/b/' * 12) + 'foo.txt'
+ expected_message = "\"#{truncated_file_path}\" did not exist on \"theref\""
+
+ get :fake, params: { project_id: project.id, ref: 'theref', file_path: long_file_path }
+
+ expect(response).to redirect_to project_tree_path(project, 'theref')
+ expect(response.flash[:notice]).to eq(expected_message)
+ end
+ end
+end
diff --git a/spec/controllers/concerns/renders_commits_spec.rb b/spec/controllers/concerns/renders_commits_spec.rb
new file mode 100644
index 00000000000..79350847383
--- /dev/null
+++ b/spec/controllers/concerns/renders_commits_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe RendersCommits do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:user) { create(:user) }
+
+ controller(ApplicationController) do
+ # `described_class` is not available in this context
+ include RendersCommits # rubocop:disable RSpec/DescribedClass
+
+ def index
+ @merge_request = MergeRequest.find(params[:id])
+ @commits = set_commits_for_rendering(
+ @merge_request.recent_commits.with_latest_pipeline(@merge_request.source_branch),
+ commits_count: @merge_request.commits_count
+ )
+
+ render json: { html: view_to_html_string('projects/merge_requests/_commits') }
+ end
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ def go
+ get :index, params: { id: merge_request.id }
+ end
+
+ it 'sets instance variables for counts' do
+ stub_const("MergeRequestDiff::COMMITS_SAFE_SIZE", 10)
+
+ go
+
+ expect(assigns[:total_commit_count]).to eq(29)
+ expect(assigns[:hidden_commit_count]).to eq(19)
+ expect(assigns[:commits].size).to eq(10)
+ end
+
+ context 'rendering commits' do
+ render_views
+
+ it 'avoids N + 1' do
+ stub_const("MergeRequestDiff::COMMITS_SAFE_SIZE", 5)
+
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ go
+ end.count
+
+ stub_const("MergeRequestDiff::COMMITS_SAFE_SIZE", 15)
+
+ expect do
+ go
+ end.not_to exceed_all_query_limit(control_count)
+ end
+ end
+end
diff --git a/spec/controllers/concerns/sourcegraph_gon_spec.rb b/spec/controllers/concerns/sourcegraph_gon_spec.rb
new file mode 100644
index 00000000000..4fb7e37d148
--- /dev/null
+++ b/spec/controllers/concerns/sourcegraph_gon_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SourcegraphGon do
+ let_it_be(:enabled_user) { create(:user, sourcegraph_enabled: true) }
+ let_it_be(:disabled_user) { create(:user, sourcegraph_enabled: false) }
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:internal_project) { create(:project, :internal) }
+
+ let(:sourcegraph_url) { 'http://sourcegraph.gitlab.com' }
+ let(:feature_enabled) { true }
+ let(:sourcegraph_enabled) { true }
+ let(:sourcegraph_public_only) { false }
+ let(:format) { :html }
+ let(:user) { enabled_user }
+ let(:project) { internal_project }
+
+ controller(ApplicationController) do
+ include SourcegraphGon # rubocop:disable RSpec/DescribedClass
+
+ def index
+ head :ok
+ end
+ end
+
+ before do
+ Feature.get(:sourcegraph).enable(feature_enabled)
+
+ stub_application_setting(sourcegraph_url: sourcegraph_url, sourcegraph_enabled: sourcegraph_enabled, sourcegraph_public_only: sourcegraph_public_only)
+
+ allow(controller).to receive(:project).and_return(project)
+
+ Gon.clear
+
+ sign_in user if user
+ end
+
+ after do
+ Feature.get(:sourcegraph).disable
+ end
+
+ subject do
+ get :index, format: format
+
+ Gon.sourcegraph
+ end
+
+ shared_examples 'enabled' do
+ it { is_expected.to eq({ url: sourcegraph_url }) }
+ end
+
+ shared_examples 'disabled' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with feature enabled, application enabled, and user enabled' do
+ it_behaves_like 'enabled'
+ end
+
+ context 'with feature enabled for specific project' do
+ let(:feature_enabled) { project }
+
+ it_behaves_like 'enabled'
+ end
+
+ context 'with feature enabled for different project' do
+ let(:feature_enabled) { create(:project) }
+
+ it_behaves_like 'disabled'
+ end
+
+ context 'with feature disabled' do
+ let(:feature_enabled) { false }
+
+ it_behaves_like 'disabled'
+ end
+
+ context 'with admin settings disabled' do
+ let(:sourcegraph_enabled) { false }
+
+ it_behaves_like 'disabled'
+ end
+
+ context 'with public only' do
+ let(:sourcegraph_public_only) { true }
+
+ context 'with internal project' do
+ let(:project) { internal_project }
+
+ it_behaves_like 'disabled'
+ end
+
+ context 'with public project' do
+ let(:project) { public_project }
+
+ it_behaves_like 'enabled'
+ end
+ end
+
+ context 'with user disabled' do
+ let(:user) { disabled_user }
+
+ it_behaves_like 'disabled'
+ end
+
+ context 'with no user' do
+ let(:user) { nil }
+
+ it_behaves_like 'disabled'
+ end
+
+ context 'with non-html format' do
+ let(:format) { :json }
+
+ it_behaves_like 'disabled'
+ end
+end
diff --git a/spec/controllers/google_api/authorizations_controller_spec.rb b/spec/controllers/google_api/authorizations_controller_spec.rb
index 940bf9c6828..4d200140f16 100644
--- a/spec/controllers/google_api/authorizations_controller_spec.rb
+++ b/spec/controllers/google_api/authorizations_controller_spec.rb
@@ -13,8 +13,9 @@ describe GoogleApi::AuthorizationsController do
before do
sign_in(user)
- allow_any_instance_of(GoogleApi::CloudPlatform::Client)
- .to receive(:get_token).and_return([token, expires_at])
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance|
+ allow(instance).to receive(:get_token).and_return([token, expires_at])
+ end
end
shared_examples_for 'access denied' do
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index 51a6dcca640..d027405703b 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -85,7 +85,7 @@ describe Groups::ClustersController do
end
describe 'GET new' do
- def go(provider: 'gke')
+ def go(provider: 'gcp')
get :new, params: { group_id: group, provider: provider }
end
@@ -372,6 +372,150 @@ describe Groups::ClustersController do
end
end
+ describe 'POST #create_aws' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ provider_aws_attributes: {
+ key_name: 'key',
+ role_arn: 'arn:role',
+ region: 'region',
+ vpc_id: 'vpc',
+ instance_type: 'instance type',
+ num_nodes: 3,
+ security_group_id: 'security group',
+ subnet_ids: %w(subnet1 subnet2)
+ }
+ }
+ }
+ end
+
+ def post_create_aws
+ post :create_aws, params: params.merge(group_id: group)
+ end
+
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { post_create_aws }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Providers::Aws.count }
+
+ cluster = group.clusters.first
+
+ expect(response.status).to eq(201)
+ expect(response.location).to eq(group_cluster_path(group, cluster))
+ expect(cluster).to be_aws
+ expect(cluster).to be_kubernetes
+ end
+
+ context 'params are invalid' do
+ let(:params) do
+ {
+ cluster: { name: '' }
+ }
+ end
+
+ it 'does not create a cluster' do
+ expect { post_create_aws }.not_to change { Clusters::Cluster.count }
+
+ expect(response.status).to eq(422)
+ expect(response.content_type).to eq('application/json')
+ expect(response.body).to include('is invalid')
+ end
+ end
+
+ describe 'security' do
+ before do
+ allow(WaitForClusterCreationWorker).to receive(:perform_in)
+ end
+
+ it { expect { post_create_aws }.to be_allowed_for(:admin) }
+ it { expect { post_create_aws }.to be_allowed_for(:owner).of(group) }
+ it { expect { post_create_aws }.to be_allowed_for(:maintainer).of(group) }
+ it { expect { post_create_aws }.to be_denied_for(:developer).of(group) }
+ it { expect { post_create_aws }.to be_denied_for(:reporter).of(group) }
+ it { expect { post_create_aws }.to be_denied_for(:guest).of(group) }
+ it { expect { post_create_aws }.to be_denied_for(:user) }
+ it { expect { post_create_aws }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'POST authorize AWS role for EKS cluster' do
+ let(:role_arn) { 'arn:aws:iam::123456789012:role/role-name' }
+ let(:role_external_id) { '12345' }
+
+ let(:params) do
+ {
+ cluster: {
+ role_arn: role_arn,
+ role_external_id: role_external_id
+ }
+ }
+ end
+
+ def go
+ post :authorize_aws_role, params: params.merge(group_id: group)
+ end
+
+ it 'creates an Aws::Role record' do
+ expect { go }.to change { Aws::Role.count }
+
+ expect(response.status).to eq 201
+
+ role = Aws::Role.last
+ expect(role.user).to eq user
+ expect(role.role_arn).to eq role_arn
+ expect(role.role_external_id).to eq role_external_id
+ end
+
+ context 'role cannot be created' do
+ let(:role_arn) { 'invalid-role' }
+
+ it 'does not create a record' do
+ expect { go }.not_to change { Aws::Role.count }
+
+ expect(response.status).to eq 422
+ end
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(group) }
+ it { expect { go }.to be_allowed_for(:maintainer).of(group) }
+ it { expect { go }.to be_denied_for(:developer).of(group) }
+ it { expect { go }.to be_denied_for(:reporter).of(group) }
+ it { expect { go }.to be_denied_for(:guest).of(group) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'DELETE revoke AWS role for EKS cluster' do
+ let!(:role) { create(:aws_role, user: user) }
+
+ def go
+ delete :revoke_aws_role, params: { group_id: group }
+ end
+
+ it 'deletes the Aws::Role record' do
+ expect { go }.to change { Aws::Role.count }
+
+ expect(response.status).to eq 204
+ expect(user.reload_aws_role).to be_nil
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(group) }
+ it { expect { go }.to be_allowed_for(:maintainer).of(group) }
+ it { expect { go }.to be_denied_for(:developer).of(group) }
+ it { expect { go }.to be_denied_for(:reporter).of(group) }
+ it { expect { go }.to be_denied_for(:guest).of(group) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+ end
+
describe 'GET cluster_status' do
let(:cluster) { create(:cluster, :providing_by_gcp, cluster_type: :group_type, groups: [group]) }
diff --git a/spec/controllers/groups/group_links_controller_spec.rb b/spec/controllers/groups/group_links_controller_spec.rb
new file mode 100644
index 00000000000..8f04822fee6
--- /dev/null
+++ b/spec/controllers/groups/group_links_controller_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Groups::GroupLinksController do
+ let(:shared_with_group) { create(:group, :private) }
+ let(:shared_group) { create(:group, :private) }
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe '#create' do
+ let(:shared_with_group_id) { shared_with_group.id }
+
+ subject do
+ post(:create,
+ params: { group_id: shared_group,
+ shared_with_group_id: shared_with_group_id,
+ shared_group_access: GroupGroupLink.default_access })
+ end
+
+ context 'when user has correct access to both groups' do
+ let(:group_member) { create(:user) }
+
+ before do
+ shared_with_group.add_developer(user)
+ shared_group.add_owner(user)
+
+ shared_with_group.add_developer(group_member)
+ end
+
+ it 'links group with selected group' do
+ expect { subject }.to change { shared_with_group.shared_groups.include?(shared_group) }.from(false).to(true)
+ end
+
+ it 'redirects to group links page' do
+ subject
+
+ expect(response).to(redirect_to(group_group_members_path(shared_group)))
+ end
+
+ it 'allows access for group member' do
+ expect { subject }.to change { group_member.can?(:read_group, shared_group) }.from(false).to(true)
+ end
+
+ context 'when shared with group id is not present' do
+ let(:shared_with_group_id) { nil }
+
+ it 'redirects to group links page' do
+ subject
+
+ expect(response).to(redirect_to(group_group_members_path(shared_group)))
+ expect(flash[:alert]).to eq('Please select a group.')
+ end
+ end
+
+ context 'when link is not persisted in the database' do
+ before do
+ allow(::Groups::GroupLinks::CreateService).to(
+ receive_message_chain(:new, :execute)
+ .and_return({ status: :error,
+ http_status: 409,
+ message: 'error' }))
+ end
+
+ it 'redirects to group links page' do
+ subject
+
+ expect(response).to(redirect_to(group_group_members_path(shared_group)))
+ expect(flash[:alert]).to eq('error')
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(share_group_with_group: false)
+ end
+
+ it 'renders 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context 'when user does not have access to the group' do
+ before do
+ shared_group.add_owner(user)
+ end
+
+ it 'renders 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when user does not have admin access to the shared group' do
+ before do
+ shared_with_group.add_developer(user)
+ shared_group.add_developer(user)
+ end
+
+ it 'renders 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb
index e0a3605d50a..4f4f9e5143b 100644
--- a/spec/controllers/groups/milestones_controller_spec.rb
+++ b/spec/controllers/groups/milestones_controller_spec.rb
@@ -314,6 +314,24 @@ describe Groups::MilestonesController do
expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
end
+ context 'with an AJAX request' do
+ it 'redirects to the canonical path but does not set flash message' do
+ get :merge_requests, params: { group_id: redirect_route.path, id: title }, xhr: true
+
+ expect(response).to redirect_to(merge_requests_group_milestone_path(group.to_param, title))
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+
+ context 'with JSON format' do
+ it 'redirects to the canonical path but does not set flash message' do
+ get :merge_requests, params: { group_id: redirect_route.path, id: title }, format: :json
+
+ expect(response).to redirect_to(merge_requests_group_milestone_path(group.to_param, title, format: :json))
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+
context 'when the old group path is a substring of the scheme or host' do
let(:redirect_route) { group.redirect_routes.create(path: 'http') }
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 3c39a6468e5..2ed2b319298 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -45,7 +45,7 @@ describe GroupsController do
it { is_expected.to render_template('groups/show') }
- it 'assigns events for all the projects in the group' do
+ it 'assigns events for all the projects in the group', :sidekiq_might_not_need_inline do
subject
expect(assigns(:events)).to contain_exactly(event)
end
@@ -125,7 +125,7 @@ describe GroupsController do
end
context 'as json' do
- it 'includes events from all projects in group and subgroups' do
+ it 'includes events from all projects in group and subgroups', :sidekiq_might_not_need_inline do
2.times do
project = create(:project, group: group)
create(:event, project: project)
@@ -255,7 +255,7 @@ describe GroupsController do
end
end
- describe 'GET #issues' do
+ describe 'GET #issues', :sidekiq_might_not_need_inline do
let(:issue_1) { create(:issue, project: project, title: 'foo') }
let(:issue_2) { create(:issue, project: project, title: 'bar') }
@@ -304,7 +304,7 @@ describe GroupsController do
end
end
- describe 'GET #merge_requests' do
+ describe 'GET #merge_requests', :sidekiq_might_not_need_inline do
let(:merge_request_1) { create(:merge_request, source_project: project) }
let(:merge_request_2) { create(:merge_request, :simple, source_project: project) }
diff --git a/spec/controllers/health_controller_spec.rb b/spec/controllers/health_controller_spec.rb
deleted file mode 100644
index 8a2291bccd7..00000000000
--- a/spec/controllers/health_controller_spec.rb
+++ /dev/null
@@ -1,134 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe HealthController do
- include StubENV
-
- let(:token) { Gitlab::CurrentSettings.health_check_access_token }
- let(:whitelisted_ip) { '127.0.0.1' }
- let(:not_whitelisted_ip) { '127.0.0.2' }
-
- before do
- allow(Settings.monitoring).to receive(:ip_whitelist).and_return([whitelisted_ip])
- stub_storage_settings({}) # Hide the broken storage
- stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
- end
-
- describe '#readiness' do
- shared_context 'endpoint responding with readiness data' do
- let(:request_params) { {} }
-
- subject { get :readiness, params: request_params }
-
- it 'responds with readiness checks data' do
- subject
-
- expect(json_response['db_check']).to contain_exactly({ 'status' => 'ok' })
- expect(json_response['cache_check']).to contain_exactly({ 'status' => 'ok' })
- expect(json_response['queues_check']).to contain_exactly({ 'status' => 'ok' })
- expect(json_response['shared_state_check']).to contain_exactly({ 'status' => 'ok' })
- expect(json_response['gitaly_check']).to contain_exactly(
- { 'status' => 'ok', 'labels' => { 'shard' => 'default' } })
- end
-
- it 'responds with readiness checks data when a failure happens' do
- allow(Gitlab::HealthChecks::Redis::RedisCheck).to receive(:readiness).and_return(
- Gitlab::HealthChecks::Result.new('redis_check', false, "check error"))
-
- subject
-
- expect(json_response['cache_check']).to contain_exactly({ 'status' => 'ok' })
- expect(json_response['redis_check']).to contain_exactly(
- { 'status' => 'failed', 'message' => 'check error' })
-
- expect(response.status).to eq(503)
- expect(response.headers['X-GitLab-Custom-Error']).to eq(1)
- end
- end
-
- context 'accessed from whitelisted ip' do
- before do
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
- end
-
- it_behaves_like 'endpoint responding with readiness data'
- end
-
- context 'accessed from not whitelisted ip' do
- before do
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(not_whitelisted_ip)
- end
-
- it 'responds with resource not found' do
- get :readiness
-
- expect(response.status).to eq(404)
- end
-
- context 'accessed with valid token' do
- context 'token passed in request header' do
- before do
- request.headers['TOKEN'] = token
- end
-
- it_behaves_like 'endpoint responding with readiness data'
- end
- end
-
- context 'token passed as URL param' do
- it_behaves_like 'endpoint responding with readiness data' do
- let(:request_params) { { token: token } }
- end
- end
- end
- end
-
- describe '#liveness' do
- shared_context 'endpoint responding with liveness data' do
- subject { get :liveness }
-
- it 'responds with liveness checks data' do
- subject
-
- expect(json_response).to eq('status' => 'ok')
- end
- end
-
- context 'accessed from whitelisted ip' do
- before do
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
- end
-
- it_behaves_like 'endpoint responding with liveness data'
- end
-
- context 'accessed from not whitelisted ip' do
- before do
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(not_whitelisted_ip)
- end
-
- it 'responds with resource not found' do
- get :liveness
-
- expect(response.status).to eq(404)
- end
-
- context 'accessed with valid token' do
- context 'token passed in request header' do
- before do
- request.headers['TOKEN'] = token
- end
-
- it_behaves_like 'endpoint responding with liveness data'
- end
-
- context 'token passed as URL param' do
- it_behaves_like 'endpoint responding with liveness data' do
- subject { get :liveness, params: { token: token } }
- end
- end
- end
- end
- end
-end
diff --git a/spec/controllers/import/gitlab_controller_spec.rb b/spec/controllers/import/gitlab_controller_spec.rb
index e465eca6c71..6a3713a1212 100644
--- a/spec/controllers/import/gitlab_controller_spec.rb
+++ b/spec/controllers/import/gitlab_controller_spec.rb
@@ -20,8 +20,9 @@ describe Import::GitlabController do
describe "GET callback" do
it "updates access token" do
- allow_any_instance_of(Gitlab::GitlabImport::Client)
- .to receive(:get_token).and_return(token)
+ allow_next_instance_of(Gitlab::GitlabImport::Client) do |instance|
+ allow(instance).to receive(:get_token).and_return(token)
+ end
stub_omniauth_provider('gitlab')
get :callback
diff --git a/spec/controllers/import/phabricator_controller_spec.rb b/spec/controllers/import/phabricator_controller_spec.rb
index 85085a8e996..a127e3cda3a 100644
--- a/spec/controllers/import/phabricator_controller_spec.rb
+++ b/spec/controllers/import/phabricator_controller_spec.rb
@@ -52,7 +52,7 @@ describe Import::PhabricatorController do
namespace_id: current_user.namespace_id }
end
- it 'creates a project to import' do
+ it 'creates a project to import', :sidekiq_might_not_need_inline do
expect_next_instance_of(Gitlab::PhabricatorImport::Importer) do |importer|
expect(importer).to receive(:execute)
end
diff --git a/spec/controllers/ldap/omniauth_callbacks_controller_spec.rb b/spec/controllers/ldap/omniauth_callbacks_controller_spec.rb
index 6d588c8f915..ceab9754617 100644
--- a/spec/controllers/ldap/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/ldap/omniauth_callbacks_controller_spec.rb
@@ -11,6 +11,14 @@ describe Ldap::OmniauthCallbacksController do
expect(request.env['warden']).to be_authenticated
end
+ context 'with sign in prevented' do
+ let(:ldap_settings) { ldap_setting_defaults.merge(prevent_ldap_sign_in: true) }
+
+ it 'does not allow sign in' do
+ expect { post provider }.to raise_error(ActionController::UrlGenerationError)
+ end
+ end
+
it 'respects remember me checkbox' do
expect do
post provider, params: { remember_me: '1' }
diff --git a/spec/controllers/metrics_controller_spec.rb b/spec/controllers/metrics_controller_spec.rb
index 7fb3578cd0a..1d378b9b9dc 100644
--- a/spec/controllers/metrics_controller_spec.rb
+++ b/spec/controllers/metrics_controller_spec.rb
@@ -23,7 +23,9 @@ describe MetricsController do
allow(Prometheus::Client.configuration).to receive(:multiprocess_files_dir).and_return(metrics_multiproc_dir)
allow(Gitlab::Metrics).to receive(:prometheus_metrics_enabled?).and_return(true)
allow(Settings.monitoring).to receive(:ip_whitelist).and_return([whitelisted_ip, whitelisted_ip_range])
- allow_any_instance_of(MetricsService).to receive(:metrics_text).and_return("prometheus_counter 1")
+ allow_next_instance_of(MetricsService) do |instance|
+ allow(instance).to receive(:metrics_text).and_return("prometheus_counter 1")
+ end
end
describe '#index' do
diff --git a/spec/controllers/projects/blame_controller_spec.rb b/spec/controllers/projects/blame_controller_spec.rb
index f901fd45604..dd7c0f45dc2 100644
--- a/spec/controllers/projects/blame_controller_spec.rb
+++ b/spec/controllers/projects/blame_controller_spec.rb
@@ -25,14 +25,25 @@ describe Projects::BlameController do
})
end
- context "valid file" do
+ context "valid branch, valid file" do
let(:id) { 'master/files/ruby/popen.rb' }
+
it { is_expected.to respond_with(:success) }
end
- context "invalid file" do
- let(:id) { 'master/files/ruby/missing_file.rb'}
- it { expect(response).to have_gitlab_http_status(404) }
+ context "valid branch, invalid file" do
+ let(:id) { 'master/files/ruby/invalid-path.rb' }
+
+ it 'redirects' do
+ expect(subject)
+ .to redirect_to("/#{project.full_path}/tree/master")
+ end
+ end
+
+ context "invalid branch, valid file" do
+ let(:id) { 'invalid-branch/files/ruby/missing_file.rb'}
+
+ it { is_expected.to respond_with(:not_found) }
end
end
end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index 17964c78e8d..78599935910 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -24,26 +24,34 @@ describe Projects::BlobController do
context "valid branch, valid file" do
let(:id) { 'master/README.md' }
+
it { is_expected.to respond_with(:success) }
end
context "valid branch, invalid file" do
let(:id) { 'master/invalid-path.rb' }
- it { is_expected.to respond_with(:not_found) }
+
+ it 'redirects' do
+ expect(subject)
+ .to redirect_to("/#{project.full_path}/tree/master")
+ end
end
context "invalid branch, valid file" do
let(:id) { 'invalid-branch/README.md' }
+
it { is_expected.to respond_with(:not_found) }
end
context "binary file" do
let(:id) { 'binary-encoding/encoding/binary-1.bin' }
+
it { is_expected.to respond_with(:success) }
end
context "Markdown file" do
let(:id) { 'master/README.md' }
+
it { is_expected.to respond_with(:success) }
end
end
@@ -104,6 +112,7 @@ describe Projects::BlobController do
context 'redirect to tree' do
let(:id) { 'markdown/doc' }
+
it 'redirects' do
expect(subject)
.to redirect_to("/#{project.full_path}/tree/markdown/doc")
@@ -311,7 +320,7 @@ describe Projects::BlobController do
default_params[:project_id] = forked_project
end
- it 'redirects to blob' do
+ it 'redirects to blob', :sidekiq_might_not_need_inline do
put :update, params: default_params
expect(response).to redirect_to(project_blob_path(forked_project, 'master/CHANGELOG'))
@@ -319,7 +328,7 @@ describe Projects::BlobController do
end
context 'when editing on the original repository' do
- it "redirects to forked project new merge request" do
+ it "redirects to forked project new merge request", :sidekiq_might_not_need_inline do
default_params[:branch_name] = "fork-test-1"
default_params[:create_merge_request] = 1
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index e1f6d571d27..5a0512a042e 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -79,7 +79,7 @@ describe Projects::ClustersController do
end
describe 'GET new' do
- def go(provider: 'gke')
+ def go(provider: 'gcp')
get :new, params: {
namespace_id: project.namespace,
project_id: project,
@@ -373,6 +373,150 @@ describe Projects::ClustersController do
end
end
+ describe 'POST #create_aws' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ provider_aws_attributes: {
+ key_name: 'key',
+ role_arn: 'arn:role',
+ region: 'region',
+ vpc_id: 'vpc',
+ instance_type: 'instance type',
+ num_nodes: 3,
+ security_group_id: 'security group',
+ subnet_ids: %w(subnet1 subnet2)
+ }
+ }
+ }
+ end
+
+ def post_create_aws
+ post :create_aws, params: params.merge(namespace_id: project.namespace, project_id: project)
+ end
+
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { post_create_aws }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Providers::Aws.count }
+
+ cluster = project.clusters.first
+
+ expect(response.status).to eq(201)
+ expect(response.location).to eq(project_cluster_path(project, cluster))
+ expect(cluster).to be_aws
+ expect(cluster).to be_kubernetes
+ end
+
+ context 'params are invalid' do
+ let(:params) do
+ {
+ cluster: { name: '' }
+ }
+ end
+
+ it 'does not create a cluster' do
+ expect { post_create_aws }.not_to change { Clusters::Cluster.count }
+
+ expect(response.status).to eq(422)
+ expect(response.content_type).to eq('application/json')
+ expect(response.body).to include('is invalid')
+ end
+ end
+
+ describe 'security' do
+ before do
+ allow(WaitForClusterCreationWorker).to receive(:perform_in)
+ end
+
+ it { expect { post_create_aws }.to be_allowed_for(:admin) }
+ it { expect { post_create_aws }.to be_allowed_for(:owner).of(project) }
+ it { expect { post_create_aws }.to be_allowed_for(:maintainer).of(project) }
+ it { expect { post_create_aws }.to be_denied_for(:developer).of(project) }
+ it { expect { post_create_aws }.to be_denied_for(:reporter).of(project) }
+ it { expect { post_create_aws }.to be_denied_for(:guest).of(project) }
+ it { expect { post_create_aws }.to be_denied_for(:user) }
+ it { expect { post_create_aws }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'POST authorize AWS role for EKS cluster' do
+ let(:role_arn) { 'arn:aws:iam::123456789012:role/role-name' }
+ let(:role_external_id) { '12345' }
+
+ let(:params) do
+ {
+ cluster: {
+ role_arn: role_arn,
+ role_external_id: role_external_id
+ }
+ }
+ end
+
+ def go
+ post :authorize_aws_role, params: params.merge(namespace_id: project.namespace, project_id: project)
+ end
+
+ it 'creates an Aws::Role record' do
+ expect { go }.to change { Aws::Role.count }
+
+ expect(response.status).to eq 201
+
+ role = Aws::Role.last
+ expect(role.user).to eq user
+ expect(role.role_arn).to eq role_arn
+ expect(role.role_external_id).to eq role_external_id
+ end
+
+ context 'role cannot be created' do
+ let(:role_arn) { 'invalid-role' }
+
+ it 'does not create a record' do
+ expect { go }.not_to change { Aws::Role.count }
+
+ expect(response.status).to eq 422
+ end
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:maintainer).of(project) }
+ it { expect { go }.to be_denied_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'DELETE revoke AWS role for EKS cluster' do
+ let!(:role) { create(:aws_role, user: user) }
+
+ def go
+ delete :revoke_aws_role, params: { namespace_id: project.namespace, project_id: project }
+ end
+
+ it 'deletes the Aws::Role record' do
+ expect { go }.to change { Aws::Role.count }
+
+ expect(response.status).to eq 204
+ expect(user.reload_aws_role).to be_nil
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:maintainer).of(project) }
+ it { expect { go }.to be_denied_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+ end
+
describe 'GET cluster_status' do
let(:cluster) { create(:cluster, :providing_by_gcp, projects: [project]) }
diff --git a/spec/controllers/projects/discussions_controller_spec.rb b/spec/controllers/projects/discussions_controller_spec.rb
index 6ed822bbb10..d59f76c1b32 100644
--- a/spec/controllers/projects/discussions_controller_spec.rb
+++ b/spec/controllers/projects/discussions_controller_spec.rb
@@ -104,7 +104,9 @@ describe Projects::DiscussionsController do
end
it "sends notifications if all discussions are resolved" do
- expect_any_instance_of(MergeRequests::ResolvedDiscussionNotificationService).to receive(:execute).with(merge_request)
+ expect_next_instance_of(MergeRequests::ResolvedDiscussionNotificationService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request)
+ end
post :resolve, params: request_params
end
@@ -122,8 +124,10 @@ describe Projects::DiscussionsController do
end
it "renders discussion with serializer" do
- expect_any_instance_of(DiscussionSerializer).to receive(:represent)
- .with(instance_of(Discussion), { context: instance_of(described_class), render_truncated_diff_lines: true })
+ expect_next_instance_of(DiscussionSerializer) do |instance|
+ expect(instance).to receive(:represent)
+ .with(instance_of(Discussion), { context: instance_of(described_class), render_truncated_diff_lines: true })
+ end
post :resolve, params: request_params
end
@@ -193,8 +197,10 @@ describe Projects::DiscussionsController do
end
it "renders discussion with serializer" do
- expect_any_instance_of(DiscussionSerializer).to receive(:represent)
- .with(instance_of(Discussion), { context: instance_of(described_class), render_truncated_diff_lines: true })
+ expect_next_instance_of(DiscussionSerializer) do |instance|
+ expect(instance).to receive(:represent)
+ .with(instance_of(Discussion), { context: instance_of(described_class), render_truncated_diff_lines: true })
+ end
delete :unresolve, params: request_params
end
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index 3fe5ff5feee..7bb956201fd 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -330,11 +330,11 @@ describe Projects::EnvironmentsController do
expect(response).to redirect_to(environment_metrics_path(environment))
end
- it 'redirects to empty page if no environment exists' do
+ it 'redirects to empty metrics page if no environment exists' do
get :metrics_redirect, params: { namespace_id: project.namespace, project_id: project }
expect(response).to be_ok
- expect(response).to render_template 'empty'
+ expect(response).to render_template 'empty_metrics'
end
end
diff --git a/spec/controllers/projects/error_tracking_controller_spec.rb b/spec/controllers/projects/error_tracking_controller_spec.rb
index 31868f5f717..8155d6ddafe 100644
--- a/spec/controllers/projects/error_tracking_controller_spec.rb
+++ b/spec/controllers/projects/error_tracking_controller_spec.rb
@@ -46,17 +46,6 @@ describe Projects::ErrorTrackingController do
end
describe 'format json' do
- shared_examples 'no data' do
- it 'returns no data' do
- get :index, params: project_params(format: :json)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('error_tracking/index')
- expect(json_response['external_url']).to be_nil
- expect(json_response['errors']).to eq([])
- end
- end
-
let(:list_issues_service) { spy(:list_issues_service) }
let(:external_url) { 'http://example.com' }
@@ -66,6 +55,19 @@ describe Projects::ErrorTrackingController do
.and_return(list_issues_service)
end
+ context 'no data' do
+ before do
+ expect(list_issues_service).to receive(:execute)
+ .and_return(status: :error, http_status: :no_content)
+ end
+
+ it 'returns no data' do
+ get :index, params: project_params(format: :json)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
context 'service result is successful' do
before do
expect(list_issues_service).to receive(:execute)
@@ -232,8 +234,186 @@ describe Projects::ErrorTrackingController do
end
end
+ describe 'GET #issue_details' do
+ let_it_be(:issue_id) { 1234 }
+
+ let(:issue_details_service) { spy(:issue_details_service) }
+
+ let(:permitted_params) do
+ ActionController::Parameters.new(
+ { issue_id: issue_id.to_s }
+ ).permit!
+ end
+
+ before do
+ expect(ErrorTracking::IssueDetailsService)
+ .to receive(:new).with(project, user, permitted_params)
+ .and_return(issue_details_service)
+ end
+
+ describe 'format json' do
+ context 'no data' do
+ before do
+ expect(issue_details_service).to receive(:execute)
+ .and_return(status: :error, http_status: :no_content)
+ end
+
+ it 'returns no data' do
+ get :details, params: issue_params(issue_id: issue_id, format: :json)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'service result is successful' do
+ before do
+ expect(issue_details_service).to receive(:execute)
+ .and_return(status: :success, issue: error)
+ end
+
+ let(:error) { build(:detailed_error_tracking_error) }
+
+ it 'returns an error' do
+ get :details, params: issue_params(issue_id: issue_id, format: :json)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('error_tracking/issue_detailed')
+ expect(json_response['error']).to eq(error.as_json)
+ end
+ end
+
+ context 'service result is erroneous' do
+ let(:error_message) { 'error message' }
+
+ context 'without http_status' do
+ before do
+ expect(issue_details_service).to receive(:execute)
+ .and_return(status: :error, message: error_message)
+ end
+
+ it 'returns 400 with message' do
+ get :details, params: issue_params(issue_id: issue_id, format: :json)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
+
+ context 'with explicit http_status' do
+ let(:http_status) { :no_content }
+
+ before do
+ expect(issue_details_service).to receive(:execute).and_return(
+ status: :error,
+ message: error_message,
+ http_status: http_status
+ )
+ end
+
+ it 'returns http_status with message' do
+ get :details, params: issue_params(issue_id: issue_id, format: :json)
+
+ expect(response).to have_gitlab_http_status(http_status)
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
+ end
+ end
+ end
+
+ describe 'GET #stack_trace' do
+ let_it_be(:issue_id) { 1234 }
+
+ let(:issue_stack_trace_service) { spy(:issue_stack_trace_service) }
+
+ let(:permitted_params) do
+ ActionController::Parameters.new(
+ { issue_id: issue_id.to_s }
+ ).permit!
+ end
+
+ before do
+ expect(ErrorTracking::IssueLatestEventService)
+ .to receive(:new).with(project, user, permitted_params)
+ .and_return(issue_stack_trace_service)
+ end
+
+ describe 'format json' do
+ context 'awaiting data' do
+ before do
+ expect(issue_stack_trace_service).to receive(:execute)
+ .and_return(status: :error, http_status: :no_content)
+ end
+
+ it 'returns no data' do
+ get :stack_trace, params: issue_params(issue_id: issue_id, format: :json)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'service result is successful' do
+ before do
+ expect(issue_stack_trace_service).to receive(:execute)
+ .and_return(status: :success, latest_event: error_event)
+ end
+
+ let(:error_event) { build(:error_tracking_error_event) }
+
+ it 'returns an error' do
+ get :stack_trace, params: issue_params(issue_id: issue_id, format: :json)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('error_tracking/issue_stack_trace')
+ expect(json_response['error']).to eq(error_event.as_json)
+ end
+ end
+
+ context 'service result is erroneous' do
+ let(:error_message) { 'error message' }
+
+ context 'without http_status' do
+ before do
+ expect(issue_stack_trace_service).to receive(:execute)
+ .and_return(status: :error, message: error_message)
+ end
+
+ it 'returns 400 with message' do
+ get :stack_trace, params: issue_params(issue_id: issue_id, format: :json)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
+
+ context 'with explicit http_status' do
+ let(:http_status) { :no_content }
+
+ before do
+ expect(issue_stack_trace_service).to receive(:execute).and_return(
+ status: :error,
+ message: error_message,
+ http_status: http_status
+ )
+ end
+
+ it 'returns http_status with message' do
+ get :stack_trace, params: issue_params(issue_id: issue_id, format: :json)
+
+ expect(response).to have_gitlab_http_status(http_status)
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
+ end
+ end
+ end
+
private
+ def issue_params(opts = {})
+ project_params.reverse_merge(opts)
+ end
+
def project_params(opts = {})
opts.reverse_merge(namespace_id: project.namespace, project_id: project)
end
diff --git a/spec/controllers/projects/grafana_api_controller_spec.rb b/spec/controllers/projects/grafana_api_controller_spec.rb
index 352a364295b..0ef96514961 100644
--- a/spec/controllers/projects/grafana_api_controller_spec.rb
+++ b/spec/controllers/projects/grafana_api_controller_spec.rb
@@ -94,4 +94,75 @@ describe Projects::GrafanaApiController do
end
end
end
+
+ describe 'GET #metrics_dashboard' do
+ let(:service_result) { { status: :success, dashboard: '{}' } }
+ let(:params) do
+ {
+ format: :json,
+ embedded: true,
+ grafana_url: 'https://grafana.example.com',
+ namespace_id: project.namespace.full_path,
+ project_id: project.name
+ }
+ end
+
+ before do
+ allow(Gitlab::Metrics::Dashboard::Finder)
+ .to receive(:find)
+ .and_return(service_result)
+ end
+
+ context 'when the result is still processing' do
+ let(:service_result) { nil }
+
+ it 'returns 204 no content' do
+ get :metrics_dashboard, params: params
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'when the result was successful' do
+ it 'returns the dashboard response' do
+ get :metrics_dashboard, params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({
+ 'dashboard' => '{}',
+ 'status' => 'success'
+ })
+ end
+ end
+
+ context 'when an error has occurred' do
+ shared_examples_for 'error response' do |http_status|
+ it "returns #{http_status}" do
+ get :metrics_dashboard, params: params
+
+ expect(response).to have_gitlab_http_status(http_status)
+ expect(json_response['status']).to eq('error')
+ expect(json_response['message']).to eq('error message')
+ end
+ end
+
+ context 'with an error accessing grafana' do
+ let(:service_result) do
+ {
+ http_status: :service_unavailable,
+ status: :error,
+ message: 'error message'
+ }
+ end
+
+ it_behaves_like 'error response', :service_unavailable
+ end
+
+ context 'with a processing error' do
+ let(:service_result) { { status: :error, message: 'error message' } }
+
+ it_behaves_like 'error response', :bad_request
+ end
+ end
+ end
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index d36336a9f67..8770a5ee303 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1252,7 +1252,7 @@ describe Projects::IssuesController do
stub_feature_flags(create_confidential_merge_request: true)
end
- it 'creates a new merge request' do
+ it 'creates a new merge request', :sidekiq_might_not_need_inline do
expect { create_merge_request }.to change(target_project.merge_requests, :count).by(1)
end
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index 90ccb884927..349d73f13ca 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -154,7 +154,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
.and_return(merge_request)
end
- it 'does not serialize builds in exposed stages' do
+ it 'does not serialize builds in exposed stages', :sidekiq_might_not_need_inline do
get_show_json
json_response.dig('pipeline', 'details', 'stages').tap do |stages|
@@ -183,7 +183,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
context 'job is cancelable' do
let(:job) { create(:ci_build, :running, pipeline: pipeline) }
- it 'cancel_path is present with correct redirect' do
+ it 'cancel_path is present with correct redirect', :sidekiq_might_not_need_inline do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['cancel_path']).to include(CGI.escape(json_response['build_path']))
@@ -193,7 +193,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
context 'with web terminal' do
let(:job) { create(:ci_build, :running, :with_runner_session, pipeline: pipeline) }
- it 'exposes the terminal path' do
+ it 'exposes the terminal path', :sidekiq_might_not_need_inline do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['terminal_path']).to match(%r{/terminal})
@@ -268,7 +268,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
 project.add_maintainer(user) # Need to be a maintainer to view cluster.path
end
- it 'exposes the deployment information' do
+ it 'exposes the deployment information', :sidekiq_might_not_need_inline do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
@@ -292,7 +292,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
sign_in(user)
end
- it 'user can edit runner' do
+ it 'user can edit runner', :sidekiq_might_not_need_inline do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
@@ -312,7 +312,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
sign_in(user)
end
- it 'user can not edit runner' do
+ it 'user can not edit runner', :sidekiq_might_not_need_inline do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
@@ -331,7 +331,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
sign_in(user)
end
- it 'user can not edit runner' do
+ it 'user can not edit runner', :sidekiq_might_not_need_inline do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
@@ -412,7 +412,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
context 'when job has trace' do
let(:job) { create(:ci_build, :running, :trace_live, pipeline: pipeline) }
- it "has_trace is true" do
+ it "has_trace is true", :sidekiq_might_not_need_inline do
get_show_json
expect(response).to match_response_schema('job/job_details')
@@ -458,7 +458,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: 'TRIGGER_VALUE_1')
end
- context 'user is a maintainer' do
+ context 'user is a maintainer', :sidekiq_might_not_need_inline do
before do
project.add_maintainer(user)
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index ff089df37f7..aee017b211a 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -204,6 +204,24 @@ describe Projects::LabelsController do
expect(response).to redirect_to(project_labels_path(project))
expect(controller).to set_flash[:notice].to(project_moved_message(redirect_route, project))
end
+
+ context 'with an AJAX request' do
+ it 'redirects to the canonical path but does not set flash message' do
+ get :index, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }, xhr: true
+
+ expect(response).to redirect_to(project_labels_path(project))
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+
+ context 'with JSON format' do
+ it 'redirects to the canonical path but does not set flash message' do
+ get :index, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }, format: :json
+
+ expect(response).to redirect_to(project_labels_path(project, format: :json))
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
end
end
end
diff --git a/spec/controllers/projects/mattermosts_controller_spec.rb b/spec/controllers/projects/mattermosts_controller_spec.rb
index 45125385d9e..64440ed585d 100644
--- a/spec/controllers/projects/mattermosts_controller_spec.rb
+++ b/spec/controllers/projects/mattermosts_controller_spec.rb
@@ -13,8 +13,9 @@ describe Projects::MattermostsController do
describe 'GET #new' do
before do
- allow_any_instance_of(MattermostSlashCommandsService)
- .to receive(:list_teams).and_return([])
+ allow_next_instance_of(MattermostSlashCommandsService) do |instance|
+ allow(instance).to receive(:list_teams).and_return([])
+ end
end
it 'accepts the request' do
@@ -42,7 +43,9 @@ describe Projects::MattermostsController do
context 'no request can be made to mattermost' do
it 'shows the error' do
- allow_any_instance_of(MattermostSlashCommandsService).to receive(:configure).and_return([false, "error message"])
+ allow_next_instance_of(MattermostSlashCommandsService) do |instance|
+ allow(instance).to receive(:configure).and_return([false, "error message"])
+ end
expect(subject).to redirect_to(new_project_mattermost_url(project))
end
@@ -50,7 +53,9 @@ describe Projects::MattermostsController do
 context 'the request is successful' do
before do
- allow_any_instance_of(Mattermost::Command).to receive(:create).and_return('token')
+ allow_next_instance_of(Mattermost::Command) do |instance|
+ allow(instance).to receive(:create).and_return('token')
+ end
end
it 'redirects to the new page' do
diff --git a/spec/controllers/projects/merge_requests/creations_controller_spec.rb b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
index ce977f26ec6..1bbb80f9904 100644
--- a/spec/controllers/projects/merge_requests/creations_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
@@ -85,7 +85,9 @@ describe Projects::MergeRequests::CreationsController do
describe 'GET diffs' do
context 'when merge request cannot be created' do
it 'does not assign diffs var' do
- allow_any_instance_of(MergeRequest).to receive(:can_be_created).and_return(false)
+ allow_next_instance_of(MergeRequest) do |instance|
+ allow(instance).to receive(:can_be_created).and_return(false)
+ end
get :diffs, params: get_diff_params.merge(format: 'json')
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index 5c02e8d6461..06d9af33189 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -34,6 +34,16 @@ describe Projects::MergeRequests::DiffsController do
it 'saves the preferred diff view in a cookie' do
expect(response.cookies['diff_view']).to eq('parallel')
end
+
+ it 'only renders the required view', :aggregate_failures do
+ diff_files_without_deletions = json_response['diff_files'].reject { |f| f['deleted_file'] }
+ have_no_inline_diff_lines = satisfy('have no inline diff lines') do |diff_file|
+ !diff_file.has_key?('highlighted_diff_lines')
+ end
+
+ expect(diff_files_without_deletions).to all(have_key('parallel_diff_lines'))
+ expect(diff_files_without_deletions).to all(have_no_inline_diff_lines)
+ end
end
context 'when the user cannot view the merge request' do
@@ -76,7 +86,9 @@ describe Projects::MergeRequests::DiffsController do
end
it 'serializes merge request diff collection' do
- expect_any_instance_of(DiffsSerializer).to receive(:represent).with(an_instance_of(Gitlab::Diff::FileCollection::MergeRequestDiff), an_instance_of(Hash))
+ expect_next_instance_of(DiffsSerializer) do |instance|
+ expect(instance).to receive(:represent).with(an_instance_of(Gitlab::Diff::FileCollection::MergeRequestDiff), an_instance_of(Hash))
+ end
go
end
@@ -88,7 +100,9 @@ describe Projects::MergeRequests::DiffsController do
end
it 'serializes merge request diff collection' do
- expect_any_instance_of(DiffsSerializer).to receive(:represent).with(an_instance_of(Gitlab::Diff::FileCollection::MergeRequestDiff), an_instance_of(Hash))
+ expect_next_instance_of(DiffsSerializer) do |instance|
+ expect(instance).to receive(:represent).with(an_instance_of(Gitlab::Diff::FileCollection::MergeRequestDiff), an_instance_of(Hash))
+ end
go
end
@@ -259,7 +273,7 @@ describe Projects::MergeRequests::DiffsController do
it 'only renders the diffs for the path given' do
diff_for_path(old_path: existing_path, new_path: existing_path)
- paths = json_response["diff_files"].map { |file| file['new_path'] }
+ paths = json_response['diff_files'].map { |file| file['new_path'] }
expect(paths).to include(existing_path)
end
@@ -344,6 +358,7 @@ describe Projects::MergeRequests::DiffsController do
let(:expected_options) do
{
merge_request: merge_request,
+ diff_view: :inline,
pagination_data: {
current_page: 1,
next_page: nil,
@@ -367,6 +382,7 @@ describe Projects::MergeRequests::DiffsController do
let(:expected_options) do
{
merge_request: merge_request,
+ diff_view: :inline,
pagination_data: {
current_page: 2,
next_page: 3,
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index ea702792557..9f7fde2f0da 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe Projects::MergeRequestsController do
include ProjectForksHelper
+ include Gitlab::Routing
let(:project) { create(:project, :repository) }
let(:user) { project.owner }
@@ -206,7 +207,7 @@ describe Projects::MergeRequestsController do
it 'redirects to last_page if page number is larger than number of pages' do
get_merge_requests(last_page + 1)
- expect(response).to redirect_to(namespace_project_merge_requests_path(page: last_page, state: controller.params[:state], scope: controller.params[:scope]))
+ expect(response).to redirect_to(project_merge_requests_path(project, page: last_page, state: controller.params[:state], scope: controller.params[:scope]))
end
it 'redirects to specified page' do
@@ -227,7 +228,7 @@ describe Projects::MergeRequestsController do
host: external_host
}
- expect(response).to redirect_to(namespace_project_merge_requests_path(page: last_page, state: controller.params[:state], scope: controller.params[:scope]))
+ expect(response).to redirect_to(project_merge_requests_path(project, page: last_page, state: controller.params[:state], scope: controller.params[:scope]))
end
end
@@ -404,7 +405,7 @@ describe Projects::MergeRequestsController do
end
it 'starts the merge immediately with permitted params' do
- expect(MergeWorker).to receive(:perform_async).with(merge_request.id, anything, { 'squash' => false })
+ expect(MergeWorker).to receive(:perform_async).with(merge_request.id, anything, { 'sha' => merge_request.diff_head_sha })
merge_with_sha
end
@@ -430,10 +431,15 @@ describe Projects::MergeRequestsController do
context 'when a squash commit message is passed' do
let(:message) { 'My custom squash commit message' }
- it 'passes the same message to SquashService' do
- params = { squash: '1', squash_commit_message: message }
+ it 'passes the same message to SquashService', :sidekiq_might_not_need_inline do
+ params = { squash: '1',
+ squash_commit_message: message,
+ sha: merge_request.diff_head_sha }
+ expected_squash_params = { squash_commit_message: message,
+ sha: merge_request.diff_head_sha,
+ merge_request: merge_request }
- expect_next_instance_of(MergeRequests::SquashService, project, user, params.merge(merge_request: merge_request)) do |squash_service|
+ expect_next_instance_of(MergeRequests::SquashService, project, user, expected_squash_params) do |squash_service|
expect(squash_service).to receive(:execute).and_return({
status: :success,
squash_sha: SecureRandom.hex(20)
@@ -723,7 +729,7 @@ describe Projects::MergeRequestsController do
context 'with private builds' do
context 'for the target project member' do
- it 'does not respond with serialized pipelines' do
+ it 'does not respond with serialized pipelines', :sidekiq_might_not_need_inline do
expect(json_response['pipelines']).to be_empty
expect(json_response['count']['all']).to eq(0)
expect(response).to include_pagination_headers
@@ -733,7 +739,7 @@ describe Projects::MergeRequestsController do
context 'for the source project member' do
let(:user) { fork_user }
- it 'responds with serialized pipelines' do
+ it 'responds with serialized pipelines', :sidekiq_might_not_need_inline do
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
@@ -749,7 +755,7 @@ describe Projects::MergeRequestsController do
end
context 'for the target project member' do
- it 'does not respond with serialized pipelines' do
+ it 'does not respond with serialized pipelines', :sidekiq_might_not_need_inline do
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
@@ -759,7 +765,7 @@ describe Projects::MergeRequestsController do
context 'for the source project member' do
let(:user) { fork_user }
- it 'responds with serialized pipelines' do
+ it 'responds with serialized pipelines', :sidekiq_might_not_need_inline do
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
@@ -770,6 +776,172 @@ describe Projects::MergeRequestsController do
end
end
+ describe 'GET exposed_artifacts' do
+ let(:merge_request) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ target_project: project,
+ source_project: project)
+ end
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ :success,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+
+ let!(:job) { create(:ci_build, pipeline: pipeline, options: job_options) }
+ let!(:job_metadata) { create(:ci_job_artifact, :metadata, job: job) }
+
+ before do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:find_exposed_artifacts)
+ .and_return(report)
+
+ allow_any_instance_of(MergeRequest)
+ .to receive(:actual_head_pipeline)
+ .and_return(pipeline)
+ end
+
+ subject do
+ get :exposed_artifacts, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid
+ },
+ format: :json
+ end
+
+ describe 'permissions on a public project with private CI/CD' do
+ let(:project) { create :project, :repository, :public, :builds_private }
+ let(:report) { { status: :parsed, data: [] } }
+ let(:job_options) { {} }
+
+ context 'while signed out' do
+ before do
+ sign_out(user)
+ end
+
+ it 'responds with a 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ expect(response.body).to be_blank
+ end
+ end
+
+ context 'while signed in as an unrelated user' do
+ before do
+ sign_in(create(:user))
+ end
+
+ it 'responds with a 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ expect(response.body).to be_blank
+ end
+ end
+ end
+
+ context 'when pipeline has jobs with exposed artifacts' do
+ let(:job_options) do
+ {
+ artifacts: {
+ paths: ['ci_artifacts.txt'],
+ expose_as: 'Exposed artifact'
+ }
+ }
+ end
+
+ context 'when fetching exposed artifacts is in progress' do
+ let(:report) { { status: :parsing } }
+
+ it 'sends polling interval' do
+ expect(Gitlab::PollingInterval).to receive(:set_header)
+
+ subject
+ end
+
+ it 'returns 204 HTTP status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'when fetching exposed artifacts is completed' do
+ let(:data) do
+ Ci::GenerateExposedArtifactsReportService.new(project, user)
+ .execute(nil, pipeline)
+ end
+
+ let(:report) { { status: :parsed, data: data } }
+
+ it 'returns exposed artifacts' do
+ subject
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['status']).to eq('parsed')
+ expect(json_response['data']).to eq([{
+ 'job_name' => 'test',
+ 'job_path' => project_job_path(project, job),
+ 'url' => file_project_job_artifacts_path(project, job, 'ci_artifacts.txt'),
+ 'text' => 'Exposed artifact'
+ }])
+ end
+ end
+
+ context 'when feature flag :ci_expose_arbitrary_artifacts_in_mr is disabled' do
+ let(:job_options) do
+ {
+ artifacts: {
+ paths: ['ci_artifacts.txt'],
+ expose_as: 'Exposed artifact'
+ }
+ }
+ end
+ let(:report) { double }
+
+ before do
+ stub_feature_flags(ci_expose_arbitrary_artifacts_in_mr: false)
+ end
+
+ it 'does not send polling interval' do
+ expect(Gitlab::PollingInterval).not_to receive(:set_header)
+
+ subject
+ end
+
+ it 'returns 204 HTTP status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+ end
+
+ context 'when pipeline does not have jobs with exposed artifacts' do
+ let(:report) { double }
+ let(:job_options) do
+ {
+ artifacts: {
+ paths: ['ci_artifacts.txt']
+ }
+ }
+ end
+
+ it 'returns no content' do
+ subject
+
+ expect(response).to have_gitlab_http_status(204)
+ expect(response.body).to be_empty
+ end
+ end
+ end
+
describe 'GET test_reports' do
let(:merge_request) do
create(:merge_request,
@@ -879,23 +1051,6 @@ describe Projects::MergeRequestsController do
expect(json_response).to eq({ 'status_reason' => 'Failed to parse test reports' })
end
end
-
- context 'when something went wrong on our system' do
- let(:comparison_status) { {} }
-
- it 'does not send polling interval' do
- expect(Gitlab::PollingInterval).not_to receive(:set_header)
-
- subject
- end
-
- it 'returns 500 HTTP status' do
- subject
-
- expect(response).to have_gitlab_http_status(:internal_server_error)
- expect(json_response).to eq({ 'status_reason' => 'Unknown error' })
- end
- end
end
describe 'POST remove_wip' do
@@ -1019,13 +1174,13 @@ describe Projects::MergeRequestsController do
create(:merge_request, source_project: forked, target_project: project, target_branch: 'master', head_pipeline: pipeline)
end
- it 'links to the environment on that project' do
+ it 'links to the environment on that project', :sidekiq_might_not_need_inline do
get_ci_environments_status
expect(json_response.first['url']).to match /#{forked.full_path}/
end
- context "when environment_target is 'merge_commit'" do
+ context "when environment_target is 'merge_commit'", :sidekiq_might_not_need_inline do
it 'returns nothing' do
get_ci_environments_status(environment_target: 'merge_commit')
@@ -1056,13 +1211,13 @@ describe Projects::MergeRequestsController do
# we're trying to reduce the overall number of queries for this method.
# set a hard limit for now. https://gitlab.com/gitlab-org/gitlab-foss/issues/52287
- it 'keeps queries in check' do
+ it 'keeps queries in check', :sidekiq_might_not_need_inline do
control_count = ActiveRecord::QueryRecorder.new { get_ci_environments_status }.count
expect(control_count).to be <= 137
end
- it 'has no N+1 SQL issues for environments', :request_store, retry: 0 do
+ it 'has no N+1 SQL issues for environments', :request_store, :sidekiq_might_not_need_inline, retry: 0 do
# First run to insert test data from lets, which does take up some 30 queries
get_ci_environments_status
@@ -1225,6 +1380,33 @@ describe Projects::MergeRequestsController do
end
end
+ context 'with SELECT FOR UPDATE lock' do
+ before do
+ stub_feature_flags(merge_request_rebase_nowait_lock: false)
+ end
+
+ it 'executes rebase' do
+ allow_any_instance_of(MergeRequest).to receive(:with_lock).with(true).and_call_original
+ expect(RebaseWorker).to receive(:perform_async)
+
+ post_rebase
+
+ expect(response.status).to eq(200)
+ end
+ end
+
+ context 'with NOWAIT lock' do
+ it 'returns a 409' do
+ allow_any_instance_of(MergeRequest).to receive(:with_lock).with('FOR UPDATE NOWAIT').and_raise(ActiveRecord::LockWaitTimeout)
+ expect(RebaseWorker).not_to receive(:perform_async)
+
+ post_rebase
+
+ expect(response.status).to eq(409)
+ expect(json_response['merge_error']).to eq(MergeRequest::REBASE_LOCK_MESSAGE)
+ end
+ end
+
context 'with a forked project' do
let(:forked_project) { fork_project(project, fork_owner, repository: true) }
let(:fork_owner) { create(:user) }
@@ -1253,7 +1435,7 @@ describe Projects::MergeRequestsController do
sign_in(fork_owner)
end
- it 'returns 200' do
+ it 'returns 200', :sidekiq_might_not_need_inline do
expect_rebase_worker_for(fork_owner)
post_rebase
diff --git a/spec/controllers/projects/mirrors_controller_spec.rb b/spec/controllers/projects/mirrors_controller_spec.rb
index fb3dd75460a..e14686970a1 100644
--- a/spec/controllers/projects/mirrors_controller_spec.rb
+++ b/spec/controllers/projects/mirrors_controller_spec.rb
@@ -51,10 +51,6 @@ describe Projects::MirrorsController do
sign_in(project.owner)
end
- around do |example|
- Sidekiq::Testing.fake! { example.run }
- end
-
context 'With valid URL for a push' do
let(:remote_mirror_attributes) do
{ "0" => { "enabled" => "0", url: 'https://updated.example.com' } }
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 3ab191c0032..e576a3d2d40 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -518,7 +518,7 @@ describe Projects::NotesController do
project.id && Project.maximum(:id).succ
end
- it 'returns a 404' do
+ it 'returns a 404', :sidekiq_might_not_need_inline do
create!
expect(response).to have_gitlab_http_status(404)
end
@@ -527,13 +527,13 @@ describe Projects::NotesController do
context 'when the user has no access to the fork' do
let(:fork_visibility) { Gitlab::VisibilityLevel::PRIVATE }
- it 'returns a 404' do
+ it 'returns a 404', :sidekiq_might_not_need_inline do
create!
expect(response).to have_gitlab_http_status(404)
end
end
- context 'when the user has access to the fork' do
+ context 'when the user has access to the fork', :sidekiq_might_not_need_inline do
let!(:discussion) { forked_project.notes.find_discussion(existing_comment.discussion_id) }
let(:fork_visibility) { Gitlab::VisibilityLevel::PUBLIC }
@@ -785,7 +785,9 @@ describe Projects::NotesController do
end
it "sends notifications if all discussions are resolved" do
- expect_any_instance_of(MergeRequests::ResolvedDiscussionNotificationService).to receive(:execute).with(merge_request)
+ expect_next_instance_of(MergeRequests::ResolvedDiscussionNotificationService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request)
+ end
post :resolve, params: request_params
end
diff --git a/spec/controllers/projects/pages_domains_controller_spec.rb b/spec/controllers/projects/pages_domains_controller_spec.rb
index 032f4f1418f..3987bebb124 100644
--- a/spec/controllers/projects/pages_domains_controller_spec.rb
+++ b/spec/controllers/projects/pages_domains_controller_spec.rb
@@ -32,10 +32,10 @@ describe Projects::PagesDomainsController do
get(:show, params: request_params.merge(id: pages_domain.domain))
end
- it "displays the 'show' page" do
+ it "redirects to the 'edit' page" do
make_request
- expect(response).to have_gitlab_http_status(200)
- expect(response).to render_template('show')
+
+ expect(response).to redirect_to(edit_project_pages_domain_path(project, pages_domain.domain))
end
context 'when user is developer' do
@@ -69,7 +69,7 @@ describe Projects::PagesDomainsController do
created_domain = PagesDomain.reorder(:id).last
expect(created_domain).to be_present
- expect(response).to redirect_to(project_pages_domain_path(project, created_domain))
+ expect(response).to redirect_to(edit_project_pages_domain_path(project, created_domain))
end
end
@@ -160,7 +160,7 @@ describe Projects::PagesDomainsController do
post :verify, params: params
- expect(response).to redirect_to project_pages_domain_path(project, pages_domain)
+ expect(response).to redirect_to edit_project_pages_domain_path(project, pages_domain)
expect(flash[:notice]).to eq('Successfully verified domain ownership')
end
@@ -169,7 +169,7 @@ describe Projects::PagesDomainsController do
post :verify, params: params
- expect(response).to redirect_to project_pages_domain_path(project, pages_domain)
+ expect(response).to redirect_to edit_project_pages_domain_path(project, pages_domain)
expect(flash[:alert]).to eq('Failed to verify domain ownership')
end
@@ -190,6 +190,56 @@ describe Projects::PagesDomainsController do
end
end
+ describe 'DELETE #clean_certificate' do
+ subject do
+ delete(:clean_certificate, params: request_params.merge(id: pages_domain.domain))
+ end
+
+ it 'redirects to edit page' do
+ subject
+
+ expect(response).to redirect_to(edit_project_pages_domain_path(project, pages_domain))
+ end
+
+ it 'removes certificate' do
+ expect do
+ subject
+ end.to change { pages_domain.reload.certificate }.to(nil)
+ .and change { pages_domain.reload.key }.to(nil)
+ end
+
+ it 'sets certificate source to user_provided' do
+ pages_domain.update!(certificate_source: :gitlab_provided)
+
+ expect do
+ subject
+ end.to change { pages_domain.reload.certificate_source }.from("gitlab_provided").to("user_provided")
+ end
+
+ context 'when pages_https_only is set' do
+ before do
+ project.update!(pages_https_only: true)
+ stub_pages_setting(external_https: '127.0.0.1')
+ end
+
+ it 'does not remove certificate' do
+ subject
+
+ pages_domain.reload
+ expect(pages_domain.certificate).to be_present
+ expect(pages_domain.key).to be_present
+ end
+
+ it 'redirects to edit page with a flash message' do
+ subject
+
+ expect(flash[:alert]).to include('Certificate')
+ expect(flash[:alert]).to include('Key')
+ expect(response).to redirect_to(edit_project_pages_domain_path(project, pages_domain))
+ end
+ end
+ end
+
context 'pages disabled' do
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(false)
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index e3ad36f8d24..3c7f69f0e6e 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -93,7 +93,7 @@ describe Projects::PipelinesController do
end
context 'when performing gitaly calls', :request_store do
- it 'limits the Gitaly requests' do
+ it 'limits the Gitaly requests', :sidekiq_might_not_need_inline do
# Isolate from test preparation (Repository#exists? is also cached in RequestStore)
RequestStore.end!
RequestStore.clear!
@@ -149,7 +149,7 @@ describe Projects::PipelinesController do
end
describe 'GET show.json' do
- let(:pipeline) { create(:ci_pipeline_with_one_job, project: project) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
it 'returns the pipeline' do
get_pipeline_json
@@ -571,7 +571,7 @@ describe Projects::PipelinesController do
format: :json
end
- it 'cancels a pipeline without returning any content' do
+ it 'cancels a pipeline without returning any content', :sidekiq_might_not_need_inline do
expect(response).to have_gitlab_http_status(:no_content)
expect(pipeline.reload).to be_canceled
end
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index 2f473d395ad..072df1f5060 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -45,7 +45,9 @@ describe Projects::ProjectMembersController do
end
it 'adds user to members' do
- expect_any_instance_of(Members::CreateService).to receive(:execute).and_return(status: :success)
+ expect_next_instance_of(Members::CreateService) do |instance|
+ expect(instance).to receive(:execute).and_return(status: :success)
+ end
post :create, params: {
namespace_id: project.namespace,
@@ -59,7 +61,9 @@ describe Projects::ProjectMembersController do
end
it 'adds no user to members' do
- expect_any_instance_of(Members::CreateService).to receive(:execute).and_return(status: :failure, message: 'Message')
+ expect_next_instance_of(Members::CreateService) do |instance|
+ expect(instance).to receive(:execute).and_return(status: :failure, message: 'Message')
+ end
post :create, params: {
namespace_id: project.namespace,
diff --git a/spec/controllers/projects/prometheus/metrics_controller_spec.rb b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
index 17f9483be98..afdb8bbc983 100644
--- a/spec/controllers/projects/prometheus/metrics_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
@@ -85,7 +85,9 @@ describe Projects::Prometheus::MetricsController do
end
it 'calls prometheus adapter service' do
- expect_any_instance_of(::Prometheus::AdapterService).to receive(:prometheus_adapter)
+ expect_next_instance_of(::Prometheus::AdapterService) do |instance|
+ expect(instance).to receive(:prometheus_adapter)
+ end
subject.__send__(:prometheus_adapter)
end
diff --git a/spec/controllers/projects/releases_controller_spec.rb b/spec/controllers/projects/releases_controller_spec.rb
index 5b9d21d3d5b..562119d967f 100644
--- a/spec/controllers/projects/releases_controller_spec.rb
+++ b/spec/controllers/projects/releases_controller_spec.rb
@@ -3,10 +3,36 @@
require 'spec_helper'
describe Projects::ReleasesController do
- let!(:project) { create(:project, :repository, :public) }
- let!(:user) { create(:user) }
+ let!(:project) { create(:project, :repository, :public) }
+ let!(:private_project) { create(:project, :repository, :private) }
+ let(:user) { developer }
+ let(:developer) { create(:user) }
+ let(:reporter) { create(:user) }
+ let!(:release_1) { create(:release, project: project, released_at: Time.zone.parse('2018-10-18')) }
+ let!(:release_2) { create(:release, project: project, released_at: Time.zone.parse('2019-10-19')) }
- describe 'GET #index' do
+ before do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ shared_examples_for 'successful request' do
+ it 'renders a 200' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+
+ shared_examples_for 'not found' do
+ it 'renders 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ shared_examples 'common access controls' do
it 'renders a 200' do
get_index
@@ -14,36 +40,135 @@ describe Projects::ReleasesController do
end
context 'when the project is private' do
- let!(:project) { create(:project, :repository, :private) }
+ let(:project) { private_project }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when user is a developer' do
+ let(:user) { developer }
- it 'renders a 302' do
- get_index
+ it 'renders a 200 for a logged in developer' do
+ sign_in(user)
- expect(response.status).to eq(302)
+ get_index
+
+ expect(response.status).to eq(200)
+ end
end
- it 'renders a 200 for a logged in developer' do
- project.add_developer(user)
- sign_in(user)
+ context 'when user is an external user' do
+ let(:user) { create(:user) }
- get_index
+ it 'renders a 404 when logged in but not in the project' do
+ sign_in(user)
- expect(response.status).to eq(200)
+ get_index
+
+ expect(response.status).to eq(404)
+ end
end
+ end
+ end
- it 'renders a 404 when logged in but not in the project' do
- sign_in(user)
+ describe 'GET #index' do
+ before do
+ get_index
+ end
- get_index
+ context 'as html' do
+ let(:format) { :html }
- expect(response.status).to eq(404)
+ it 'returns a text/html content_type' do
+ expect(response.content_type).to eq 'text/html'
end
+
+ it_behaves_like 'common access controls'
+
+ context 'when the project is private and the user is not logged in' do
+ let(:project) { private_project }
+
+ it 'returns a redirect' do
+ expect(response).to have_gitlab_http_status(:redirect)
+ end
+ end
+ end
+
+ context 'as json' do
+ let(:format) { :json }
+
+ it 'returns an application/json content_type' do
+ expect(response.content_type).to eq 'application/json'
+ end
+
+ it "returns the project's releases as JSON, ordered by released_at" do
+ expect(response.body).to eq([release_2, release_1].to_json)
+ end
+
+ it_behaves_like 'common access controls'
+
+ context 'when the project is private and the user is not logged in' do
+ let(:project) { private_project }
+
+ it 'returns a redirect' do
+ expect(response).to have_gitlab_http_status(:redirect)
+ end
+ end
+ end
+ end
+
+ describe 'GET #edit' do
+ subject do
+ get :edit, params: { namespace_id: project.namespace, project_id: project, tag: tag }
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ let!(:release) { create(:release, project: project) }
+ let(:tag) { CGI.escape(release.tag) }
+
+ it_behaves_like 'successful request'
+
+ context 'when tag name contains slash' do
+ let!(:release) { create(:release, project: project, tag: 'awesome/v1.0') }
+ let(:tag) { CGI.escape(release.tag) }
+
+ it_behaves_like 'successful request'
+
+ it 'is accessible at a URL encoded path' do
+ expect(edit_project_release_path(project, release))
+ .to eq("/#{project.namespace.path}/#{project.name}/-/releases/awesome%252Fv1.0/edit")
+ end
+ end
+
+ context 'when feature flag `release_edit_page` is disabled' do
+ before do
+ stub_feature_flags(release_edit_page: false)
+ end
+
+ it_behaves_like 'not found'
+ end
+
+ context 'when release does not exist' do
+ let!(:release) { }
+ let(:tag) { 'non-existent-tag' }
+
+ it_behaves_like 'not found'
+ end
+
+ context 'when user is a reporter' do
+ let(:user) { reporter }
+
+ it_behaves_like 'not found'
end
end
private
def get_index
- get :index, params: { namespace_id: project.namespace, project_id: project }
+ get :index, params: { namespace_id: project.namespace, project_id: project, format: format }
end
end
diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb
index eccc8e1d5de..73fb0fad646 100644
--- a/spec/controllers/projects/serverless/functions_controller_spec.rb
+++ b/spec/controllers/projects/serverless/functions_controller_spec.rb
@@ -13,6 +13,10 @@ describe Projects::Serverless::FunctionsController do
let(:environment) { create(:environment, project: project) }
let!(:deployment) { create(:deployment, :success, environment: environment, cluster: cluster) }
let(:knative_services_finder) { environment.knative_services_finder }
+ let(:function_description) { 'A serverless function' }
+ let(:knative_stub_options) do
+ { namespace: namespace.namespace, name: cluster.project.name, description: function_description }
+ end
let(:namespace) do
create(:cluster_kubernetes_namespace,
@@ -114,40 +118,33 @@ describe Projects::Serverless::FunctionsController do
expect(response).to have_gitlab_http_status(200)
expect(json_response).to include(
- "name" => project.name,
- "url" => "http://#{project.name}.#{namespace.namespace}.example.com",
- "podcount" => 1
+ 'name' => project.name,
+ 'url' => "http://#{project.name}.#{namespace.namespace}.example.com",
+ 'description' => function_description,
+ 'podcount' => 1
)
end
end
- context 'on Knative 0.5' do
+ context 'on Knative 0.5.0' do
+ before do
+ prepare_knative_stubs(knative_05_service(knative_stub_options))
+ end
+
+ include_examples 'GET #show with valid data'
+ end
+
+ context 'on Knative 0.6.0' do
before do
- stub_kubeclient_service_pods
- stub_reactive_cache(knative_services_finder,
- {
- services: kube_knative_services_body(
- legacy_knative: true,
- namespace: namespace.namespace,
- name: cluster.project.name
- )["items"],
- pods: kube_knative_pods_body(cluster.project.name, namespace.namespace)["items"]
- },
- *knative_services_finder.cache_args)
+ prepare_knative_stubs(knative_06_service(knative_stub_options))
end
include_examples 'GET #show with valid data'
end
- context 'on Knative 0.6 or 0.7' do
+ context 'on Knative 0.7.0' do
before do
- stub_kubeclient_service_pods
- stub_reactive_cache(knative_services_finder,
- {
- services: kube_knative_services_body(namespace: namespace.namespace, name: cluster.project.name)["items"],
- pods: kube_knative_pods_body(cluster.project.name, namespace.namespace)["items"]
- },
- *knative_services_finder.cache_args)
+ prepare_knative_stubs(knative_07_service(knative_stub_options))
end
include_examples 'GET #show with valid data'
@@ -172,11 +169,12 @@ describe Projects::Serverless::FunctionsController do
expect(response).to have_gitlab_http_status(200)
expect(json_response).to match({
- "knative_installed" => "checking",
- "functions" => [
+ 'knative_installed' => 'checking',
+ 'functions' => [
a_hash_including(
- "name" => project.name,
- "url" => "http://#{project.name}.#{namespace.namespace}.example.com"
+ 'name' => project.name,
+ 'url' => "http://#{project.name}.#{namespace.namespace}.example.com",
+ 'description' => function_description
)
]
})
@@ -189,36 +187,38 @@ describe Projects::Serverless::FunctionsController do
end
end
- context 'on Knative 0.5' do
+ context 'on Knative 0.5.0' do
before do
- stub_kubeclient_service_pods
- stub_reactive_cache(knative_services_finder,
- {
- services: kube_knative_services_body(
- legacy_knative: true,
- namespace: namespace.namespace,
- name: cluster.project.name
- )["items"],
- pods: kube_knative_pods_body(cluster.project.name, namespace.namespace)["items"]
- },
- *knative_services_finder.cache_args)
+ prepare_knative_stubs(knative_05_service(knative_stub_options))
end
include_examples 'GET #index with data'
end
- context 'on Knative 0.6 or 0.7' do
+ context 'on Knative 0.6.0' do
before do
- stub_kubeclient_service_pods
- stub_reactive_cache(knative_services_finder,
- {
- services: kube_knative_services_body(namespace: namespace.namespace, name: cluster.project.name)["items"],
- pods: kube_knative_pods_body(cluster.project.name, namespace.namespace)["items"]
- },
- *knative_services_finder.cache_args)
+ prepare_knative_stubs(knative_06_service(knative_stub_options))
end
include_examples 'GET #index with data'
end
+
+ context 'on Knative 0.7.0' do
+ before do
+ prepare_knative_stubs(knative_07_service(knative_stub_options))
+ end
+
+ include_examples 'GET #index with data'
+ end
+ end
+
+ def prepare_knative_stubs(knative_service)
+ stub_kubeclient_service_pods
+ stub_reactive_cache(knative_services_finder,
+ {
+ services: [knative_service],
+ pods: kube_knative_pods_body(cluster.project.name, namespace.namespace)["items"]
+ },
+ *knative_services_finder.cache_args)
end
end
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index c67e7f7dadd..98f8826397f 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -125,7 +125,9 @@ describe Projects::Settings::CiCdController do
context 'when run_auto_devops_pipeline is true' do
before do
- expect_any_instance_of(Projects::UpdateService).to receive(:run_auto_devops_pipeline?).and_return(true)
+ expect_next_instance_of(Projects::UpdateService) do |instance|
+ expect(instance).to receive(:run_auto_devops_pipeline?).and_return(true)
+ end
end
context 'when the project repository is empty' do
@@ -159,7 +161,9 @@ describe Projects::Settings::CiCdController do
context 'when run_auto_devops_pipeline is not true' do
before do
- expect_any_instance_of(Projects::UpdateService).to receive(:run_auto_devops_pipeline?).and_return(false)
+ expect_next_instance_of(Projects::UpdateService) do |instance|
+ expect(instance).to receive(:run_auto_devops_pipeline?).and_return(false)
+ end
end
it 'does not queue a CreatePipelineWorker' do
diff --git a/spec/controllers/projects/settings/operations_controller_spec.rb b/spec/controllers/projects/settings/operations_controller_spec.rb
index 0b34656e9e2..667a6336952 100644
--- a/spec/controllers/projects/settings/operations_controller_spec.rb
+++ b/spec/controllers/projects/settings/operations_controller_spec.rb
@@ -186,7 +186,8 @@ describe Projects::Settings::OperationsController do
{
grafana_integration_attributes: {
grafana_url: 'https://grafana.gitlab.com',
- token: 'eyJrIjoicDRlRTREdjhhOEZ5WjZPWXUzazJOSW0zZHJUejVOd3IiLCJuIjoiVGVzdCBLZXkiLCJpZCI6MX0='
+ token: 'eyJrIjoicDRlRTREdjhhOEZ5WjZPWXUzazJOSW0zZHJUejVOd3IiLCJuIjoiVGVzdCBLZXkiLCJpZCI6MX0=',
+ enabled: 'true'
}
}
end
diff --git a/spec/controllers/projects/snippets_controller_spec.rb b/spec/controllers/projects/snippets_controller_spec.rb
index 042a5542786..d372a94db56 100644
--- a/spec/controllers/projects/snippets_controller_spec.rb
+++ b/spec/controllers/projects/snippets_controller_spec.rb
@@ -92,7 +92,9 @@ describe Projects::SnippetsController do
context 'when the snippet is spam' do
before do
- allow_any_instance_of(AkismetService).to receive(:spam?).and_return(true)
+ allow_next_instance_of(AkismetService) do |instance|
+ allow(instance).to receive(:spam?).and_return(true)
+ end
end
context 'when the snippet is private' do
@@ -170,7 +172,9 @@ describe Projects::SnippetsController do
context 'when the snippet is spam' do
before do
- allow_any_instance_of(AkismetService).to receive(:spam?).and_return(true)
+ allow_next_instance_of(AkismetService) do |instance|
+ allow(instance).to receive(:spam?).and_return(true)
+ end
end
context 'when the snippet is private' do
@@ -278,7 +282,9 @@ describe Projects::SnippetsController do
let(:snippet) { create(:project_snippet, :private, project: project, author: user) }
before do
- allow_any_instance_of(AkismetService).to receive_messages(submit_spam: true)
+ allow_next_instance_of(AkismetService) do |instance|
+ allow(instance).to receive_messages(submit_spam: true)
+ end
stub_application_setting(akismet_enabled: true)
end
diff --git a/spec/controllers/projects/tree_controller_spec.rb b/spec/controllers/projects/tree_controller_spec.rb
index 7f7cabe3b0c..c0c11db5dd6 100644
--- a/spec/controllers/projects/tree_controller_spec.rb
+++ b/spec/controllers/projects/tree_controller_spec.rb
@@ -30,46 +30,61 @@ describe Projects::TreeController do
context "valid branch, no path" do
let(:id) { 'master' }
+
it { is_expected.to respond_with(:success) }
end
context "valid branch, valid path" do
let(:id) { 'master/encoding/' }
+
it { is_expected.to respond_with(:success) }
end
context "valid branch, invalid path" do
let(:id) { 'master/invalid-path/' }
- it { is_expected.to respond_with(:not_found) }
+
+ it 'redirects' do
+ expect(subject)
+ .to redirect_to("/#{project.full_path}/tree/master")
+ end
end
context "invalid branch, valid path" do
let(:id) { 'invalid-branch/encoding/' }
+
it { is_expected.to respond_with(:not_found) }
end
context "valid empty branch, invalid path" do
let(:id) { 'empty-branch/invalid-path/' }
- it { is_expected.to respond_with(:not_found) }
+
+ it 'redirects' do
+ expect(subject)
+ .to redirect_to("/#{project.full_path}/tree/empty-branch")
+ end
end
context "valid empty branch" do
let(:id) { 'empty-branch' }
+
it { is_expected.to respond_with(:success) }
end
context "invalid SHA commit ID" do
let(:id) { 'ff39438/.gitignore' }
+
it { is_expected.to respond_with(:not_found) }
end
context "valid SHA commit ID" do
let(:id) { '6d39438' }
+
it { is_expected.to respond_with(:success) }
end
context "valid SHA commit ID with path" do
let(:id) { '6d39438/.gitignore' }
+
it { expect(response).to have_gitlab_http_status(302) }
end
end
@@ -108,6 +123,7 @@ describe Projects::TreeController do
context 'redirect to blob' do
let(:id) { 'master/README.md' }
+
it 'redirects' do
redirect_url = "/#{project.full_path}/blob/master/README.md"
expect(subject)
diff --git a/spec/controllers/projects/usage_ping_controller_spec.rb b/spec/controllers/projects/usage_ping_controller_spec.rb
new file mode 100644
index 00000000000..a9abbff160d
--- /dev/null
+++ b/spec/controllers/projects/usage_ping_controller_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::UsagePingController do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ describe 'POST #web_ide_clientside_preview' do
+ subject { post :web_ide_clientside_preview, params: { namespace_id: project.namespace, project_id: project } }
+
+ before do
+ sign_in(user) if user
+ end
+
+ context 'when web ide clientside preview is enabled' do
+ before do
+ stub_application_setting(web_ide_clientside_preview_enabled: true)
+ end
+
+ context 'when the user is not authenticated' do
+ let(:user) { nil }
+
+ it 'returns 302' do
+ subject
+
+ expect(response).to have_gitlab_http_status(302)
+ end
+ end
+
+ context 'when the user does not have access to the project' do
+ it 'returns 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when the user has access to the project' do
+ let(:user) { project.owner }
+
+ it 'increments the counter' do
+ expect do
+ subject
+ end.to change { Gitlab::UsageDataCounters::WebIdeCounter.total_previews_count }.by(1)
+ end
+ end
+ end
+
+ context 'when web ide clientside preview is not enabled' do
+ let(:user) { project.owner }
+
+ before do
+ stub_application_setting(web_ide_clientside_preview_enabled: false)
+ end
+
+ it 'returns 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index e0df9556eb8..ff0259cd40d 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -653,7 +653,7 @@ describe ProjectsController do
describe "#destroy" do
let(:admin) { create(:admin) }
- it "redirects to the dashboard" do
+ it "redirects to the dashboard", :sidekiq_might_not_need_inline do
controller.instance_variable_set(:@project, project)
sign_in(admin)
@@ -674,7 +674,7 @@ describe ProjectsController do
target_project: project)
end
- it "closes all related merge requests" do
+ it "closes all related merge requests", :sidekiq_might_not_need_inline do
project.merge_requests << merge_request
sign_in(admin)
@@ -927,6 +927,30 @@ describe ProjectsController do
expect(json_response['body']).to match(/\!#{merge_request.iid} \(closed\)/)
end
end
+
+ context 'when path parameter is provided' do
+ let(:project_with_repo) { create(:project, :repository) }
+ let(:preview_markdown_params) do
+ {
+ namespace_id: project_with_repo.namespace,
+ id: project_with_repo,
+ text: "![](./logo-white.png)\n",
+ path: 'files/images/README.md'
+ }
+ end
+
+ before do
+ project_with_repo.add_maintainer(user)
+ end
+
+ it 'renders JSON body with image links expanded' do
+ expanded_path = "/#{project_with_repo.full_path}/raw/master/files/images/logo-white.png"
+
+ post :preview_markdown, params: preview_markdown_params
+
+ expect(json_response['body']).to include(expanded_path)
+ end
+ end
end
describe '#ensure_canonical_path' do
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index ebeed94c274..c5cfdd32619 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -9,6 +9,51 @@ describe RegistrationsController do
stub_feature_flags(invisible_captcha: false)
end
+ describe '#new' do
+ subject { get :new }
+
+ context 'with the experimental signup flow enabled and the user is part of the experimental group' do
+ before do
+ stub_experiment(signup_flow: true)
+ stub_experiment_for_user(signup_flow: true)
+ end
+
+ it 'tracks the event with the right parameters' do
+ expect(Gitlab::Tracking).to receive(:event).with(
+ 'Growth::Acquisition::Experiment::SignUpFlow',
+ 'start',
+ label: anything,
+ property: 'experimental_group'
+ )
+ subject
+ end
+
+ it 'renders new template and sets the resource variable' do
+ expect(subject).to render_template(:new)
+ expect(response).to have_gitlab_http_status(200)
+ expect(assigns(:resource)).to be_a(User)
+ end
+ end
+
+ context 'with the experimental signup flow enabled and the user is part of the control group' do
+ before do
+ stub_experiment(signup_flow: true)
+ stub_experiment_for_user(signup_flow: false)
+ end
+
+ it 'does not track the event' do
+ expect(Gitlab::Tracking).not_to receive(:event)
+ subject
+ end
+
+ it 'renders new template and sets the resource variable' do
+ subject
+ expect(response).to have_gitlab_http_status(302)
+ expect(response).to redirect_to(new_user_session_path(anchor: 'register-pane'))
+ end
+ end
+ end
+
describe '#create' do
let(:base_user_params) { { name: 'new_user', username: 'new_username', email: 'new@user.com', password: 'Any_password' } }
let(:user_params) { { user: base_user_params } }
@@ -217,6 +262,37 @@ describe RegistrationsController do
end
end
+ describe 'tracking data' do
+ context 'with the experimental signup flow enabled and the user is part of the control group' do
+ before do
+ stub_experiment(signup_flow: true)
+ stub_experiment_for_user(signup_flow: false)
+ end
+
+ it 'tracks the event with the right parameters' do
+ expect(Gitlab::Tracking).to receive(:event).with(
+ 'Growth::Acquisition::Experiment::SignUpFlow',
+ 'end',
+ label: anything,
+ property: 'control_group'
+ )
+ post :create, params: user_params
+ end
+ end
+
+ context 'with the experimental signup flow enabled and the user is part of the experimental group' do
+ before do
+ stub_experiment(signup_flow: true)
+ stub_experiment_for_user(signup_flow: true)
+ end
+
+ it 'does not track the event' do
+ expect(Gitlab::Tracking).not_to receive(:event)
+ post :create, params: user_params
+ end
+ end
+ end
+
it "logs a 'User Created' message" do
stub_feature_flags(registrations_recaptcha: false)
@@ -304,4 +380,22 @@ describe RegistrationsController do
end
end
end
+
+ describe '#update_registration' do
+ before do
+ stub_experiment(signup_flow: true)
+ stub_experiment_for_user(signup_flow: true)
+ sign_in(create(:user))
+ end
+
+ it 'tracks the event with the right parameters' do
+ expect(Gitlab::Tracking).to receive(:event).with(
+ 'Growth::Acquisition::Experiment::SignUpFlow',
+ 'end',
+ label: anything,
+ property: 'experimental_group'
+ )
+ patch :update_registration, params: { user: { name: 'New name', role: 'software_developer', setup_for_company: 'false' } }
+ end
+ end
end
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index 2108cf1c8ae..1e47df150b4 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe SessionsController do
include DeviseHelpers
+ include LdapHelpers
describe '#new' do
before do
@@ -34,6 +35,63 @@ describe SessionsController do
end
end
end
+
+ context 'with LDAP enabled' do
+ before do
+ stub_ldap_setting(enabled: true)
+ end
+
+ it 'assigns ldap_servers' do
+ get(:new)
+
+ expect(assigns[:ldap_servers].first.to_h).to include('label' => 'ldap', 'provider_name' => 'ldapmain')
+ end
+
+ context 'with sign_in disabled' do
+ before do
+ stub_ldap_setting(prevent_ldap_sign_in: true)
+ end
+
+ it 'assigns no ldap_servers' do
+ get(:new)
+
+ expect(assigns[:ldap_servers]).to eq []
+ end
+ end
+ end
+
+ describe 'tracking data' do
+ context 'when the user is part of the experimental group' do
+ before do
+ stub_experiment_for_user(signup_flow: true)
+ end
+
+ it 'doesn\'t pass tracking parameters to the frontend' do
+ get(:new)
+ expect(Gon.tracking_data).to be_nil
+ end
+ end
+
+ context 'with the experimental signup flow enabled and the user is part of the control group' do
+ before do
+ stub_experiment(signup_flow: true)
+ stub_experiment_for_user(signup_flow: false)
+ allow_any_instance_of(described_class).to receive(:experimentation_subject_id).and_return('uuid')
+ end
+
+ it 'passes the right tracking parameters to the frontend' do
+ get(:new)
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Growth::Acquisition::Experiment::SignUpFlow',
+ action: 'start',
+ label: 'uuid',
+ property: 'control_group'
+ }
+ )
+ end
+ end
+ end
end
describe '#create' do
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index e892c736c69..054d448c28d 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -251,7 +251,9 @@ describe SnippetsController do
context 'when the snippet is spam' do
before do
- allow_any_instance_of(AkismetService).to receive(:spam?).and_return(true)
+ allow_next_instance_of(AkismetService) do |instance|
+ allow(instance).to receive(:spam?).and_return(true)
+ end
end
context 'when the snippet is private' do
@@ -323,7 +325,9 @@ describe SnippetsController do
context 'when the snippet is spam' do
before do
- allow_any_instance_of(AkismetService).to receive(:spam?).and_return(true)
+ allow_next_instance_of(AkismetService) do |instance|
+ allow(instance).to receive(:spam?).and_return(true)
+ end
end
context 'when the snippet is private' do
@@ -431,7 +435,9 @@ describe SnippetsController do
let(:snippet) { create(:personal_snippet, :public, author: user) }
before do
- allow_any_instance_of(AkismetService).to receive_messages(submit_spam: true)
+ allow_next_instance_of(AkismetService) do |instance|
+ allow(instance).to receive_messages(submit_spam: true)
+ end
stub_application_setting(akismet_enabled: true)
end
diff --git a/spec/controllers/users_controller_spec.rb b/spec/controllers/users_controller_spec.rb
index 5566df0c216..bbbb9691f53 100644
--- a/spec/controllers/users_controller_spec.rb
+++ b/spec/controllers/users_controller_spec.rb
@@ -174,7 +174,9 @@ describe UsersController do
let(:user) { create(:user) }
before do
- allow_any_instance_of(User).to receive(:contributed_projects_ids).and_return([project.id])
+ allow_next_instance_of(User) do |instance|
+ allow(instance).to receive(:contributed_projects_ids).and_return([project.id])
+ end
sign_in(user)
project.add_developer(user)
@@ -348,6 +350,48 @@ describe UsersController do
end
end
+ describe 'GET #suggests' do
+ context 'when user exists' do
+ it 'returns JSON indicating the user exists and a suggestion' do
+ get :suggests, params: { username: user.username }
+
+ expected_json = { exists: true, suggests: ["#{user.username}1"] }.to_json
+ expect(response.body).to eq(expected_json)
+ end
+
+ context 'when the casing is different' do
+ let(:user) { create(:user, username: 'CamelCaseUser') }
+
+ it 'returns JSON indicating the user exists and a suggestion' do
+ get :suggests, params: { username: user.username.downcase }
+
+ expected_json = { exists: true, suggests: ["#{user.username.downcase}1"] }.to_json
+ expect(response.body).to eq(expected_json)
+ end
+ end
+ end
+
+ context 'when the user does not exist' do
+ it 'returns JSON indicating the user does not exist' do
+ get :suggests, params: { username: 'foo' }
+
+ expected_json = { exists: false, suggests: [] }.to_json
+ expect(response.body).to eq(expected_json)
+ end
+
+ context 'when a user changed their username' do
+ let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-username') }
+
+ it 'returns JSON indicating a user by that username does not exist' do
+ get :suggests, params: { username: 'old-username' }
+
+ expected_json = { exists: false, suggests: [] }.to_json
+ expect(response.body).to eq(expected_json)
+ end
+ end
+ end
+ end
+
describe '#ensure_canonical_path' do
before do
sign_in(user)
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 53f4a261092..e8b30868801 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -13,7 +13,7 @@ describe 'Database schema' do
# EE: edit the ee/spec/db/schema_support.rb
IGNORED_FK_COLUMNS = {
abuse_reports: %w[reporter_id user_id],
- application_settings: %w[performance_bar_allowed_group_id slack_app_id snowplow_site_id],
+ application_settings: %w[performance_bar_allowed_group_id slack_app_id snowplow_app_id eks_account_id eks_access_key_id],
approvers: %w[target_id user_id],
approvals: %w[user_id],
approver_groups: %w[target_id],
@@ -120,9 +120,55 @@ describe 'Database schema' do
end
end
+ # These pre-existing enums have limits > 2 bytes
+ IGNORED_LIMIT_ENUMS = {
+ 'Analytics::CycleAnalytics::GroupStage' => %w[start_event_identifier end_event_identifier],
+ 'Analytics::CycleAnalytics::ProjectStage' => %w[start_event_identifier end_event_identifier],
+ 'Ci::Bridge' => %w[failure_reason],
+ 'Ci::Build' => %w[failure_reason],
+ 'Ci::BuildMetadata' => %w[timeout_source],
+ 'Ci::BuildTraceChunk' => %w[data_store],
+ 'Ci::JobArtifact' => %w[file_type],
+ 'Ci::Pipeline' => %w[source config_source failure_reason],
+ 'Ci::Runner' => %w[access_level],
+ 'Ci::Stage' => %w[status],
+ 'Clusters::Applications::Ingress' => %w[ingress_type],
+ 'Clusters::Cluster' => %w[platform_type provider_type],
+ 'CommitStatus' => %w[failure_reason],
+ 'GenericCommitStatus' => %w[failure_reason],
+ 'Gitlab::DatabaseImporters::CommonMetrics::PrometheusMetric' => %w[group],
+ 'InternalId' => %w[usage],
+ 'List' => %w[list_type],
+ 'NotificationSetting' => %w[level],
+ 'Project' => %w[auto_cancel_pending_pipelines],
+ 'ProjectAutoDevops' => %w[deploy_strategy],
+ 'PrometheusMetric' => %w[group],
+ 'ResourceLabelEvent' => %w[action],
+ 'User' => %w[layout dashboard project_view],
+ 'UserCallout' => %w[feature_name],
+ 'PrometheusAlert' => %w[operator]
+ }.freeze
+
+ context 'for enums' do
+ ApplicationRecord.descendants.each do |model|
+ describe model do
+ let(:ignored_enums) { ignored_limit_enums(model.name) }
+ let(:enums) { model.defined_enums.keys - ignored_enums }
+
+ it 'uses smallint for enums' do
+ expect(model).to use_smallint_for_enums(enums)
+ end
+ end
+ end
+ end
+
private
def ignored_fk_columns(column)
IGNORED_FK_COLUMNS.fetch(column, [])
end
+
+ def ignored_limit_enums(model)
+ IGNORED_LIMIT_ENUMS.fetch(model, [])
+ end
end
diff --git a/spec/dependencies/omniauth_saml_spec.rb b/spec/dependencies/omniauth_saml_spec.rb
index 8a685648c71..e0ea9c38e69 100644
--- a/spec/dependencies/omniauth_saml_spec.rb
+++ b/spec/dependencies/omniauth_saml_spec.rb
@@ -14,7 +14,9 @@ describe 'processing of SAMLResponse in dependencies' do
before do
allow(saml_strategy).to receive(:session).and_return(session_mock)
- allow_any_instance_of(OneLogin::RubySaml::Response).to receive(:is_valid?).and_return(true)
+ allow_next_instance_of(OneLogin::RubySaml::Response) do |instance|
+ allow(instance).to receive(:is_valid?).and_return(true)
+ end
saml_strategy.send(:handle_response, mock_saml_response, {}, settings ) { }
end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index fefd89728e6..e2ec9d496bc 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
+ # TODO: we can remove this factory in favour of :ci_pipeline
factory :ci_empty_pipeline, class: Ci::Pipeline do
source { :push }
ref { 'master' }
@@ -10,20 +11,6 @@ FactoryBot.define do
project
- factory :ci_pipeline_without_jobs do
- after(:build) do |pipeline|
- pipeline.instance_variable_set(:@ci_yaml_file, YAML.dump({}))
- end
- end
-
- factory :ci_pipeline_with_one_job do
- after(:build) do |pipeline|
- allow(pipeline).to receive(:ci_yaml_file) do
- pipeline.instance_variable_set(:@ci_yaml_file, YAML.dump({ rspec: { script: "ls" } }))
- end
- end
- end
-
# Persist merge request head_pipeline_id
# on pipeline factories to avoid circular references
transient { head_pipeline_of { nil } }
@@ -34,24 +21,8 @@ FactoryBot.define do
end
factory :ci_pipeline do
- transient { config { nil } }
-
- after(:build) do |pipeline, evaluator|
- if evaluator.config
- pipeline.instance_variable_set(:@ci_yaml_file, YAML.dump(evaluator.config))
-
- # Populates pipeline with errors
- pipeline.config_processor if evaluator.config
- else
- pipeline.instance_variable_set(:@ci_yaml_file, File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')))
- end
- end
-
trait :invalid do
- config do
- { rspec: nil }
- end
-
+ yaml_errors { 'invalid YAML' }
failure_reason { :config_error }
end
@@ -95,6 +66,17 @@ FactoryBot.define do
end
end
+ trait :with_exposed_artifacts do
+ status { :success }
+
+ after(:build) do |pipeline, evaluator|
+ pipeline.builds << build(:ci_build, :artifacts,
+ pipeline: pipeline,
+ project: pipeline.project,
+ options: { artifacts: { expose_as: 'the artifact', paths: ['ci_artifacts.txt'] } })
+ end
+ end
+
trait :with_job do
after(:build) do |pipeline, evaluator|
pipeline.builds << build(:ci_build, pipeline: pipeline, project: pipeline.project)
diff --git a/spec/factories/clusters/applications/helm.rb b/spec/factories/clusters/applications/helm.rb
index c7ec7c11743..0e59f8cb9ec 100644
--- a/spec/factories/clusters/applications/helm.rb
+++ b/spec/factories/clusters/applications/helm.rb
@@ -79,6 +79,15 @@ FactoryBot.define do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
+ factory :clusters_applications_elastic_stack, class: Clusters::Applications::ElasticStack do
+ cluster factory: %i(cluster with_installed_helm provided_by_gcp)
+ end
+
+ factory :clusters_applications_crossplane, class: Clusters::Applications::Crossplane do
+ stack { 'gcp' }
+ cluster factory: %i(cluster with_installed_helm provided_by_gcp)
+ end
+
factory :clusters_applications_prometheus, class: Clusters::Applications::Prometheus do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb
index 63f33633a3c..609e7e20187 100644
--- a/spec/factories/clusters/clusters.rb
+++ b/spec/factories/clusters/clusters.rb
@@ -93,5 +93,25 @@ FactoryBot.define do
trait :not_managed do
managed { false }
end
+
+ trait :cleanup_not_started do
+ cleanup_status { 1 }
+ end
+
+ trait :cleanup_uninstalling_applications do
+ cleanup_status { 2 }
+ end
+
+ trait :cleanup_removing_project_namespaces do
+ cleanup_status { 3 }
+ end
+
+ trait :cleanup_removing_service_account do
+ cleanup_status { 4 }
+ end
+
+ trait :cleanup_errored do
+ cleanup_status { 5 }
+ end
end
end
diff --git a/spec/factories/clusters/platforms/kubernetes.rb b/spec/factories/clusters/platforms/kubernetes.rb
index 2757498e36b..dbcb838e9da 100644
--- a/spec/factories/clusters/platforms/kubernetes.rb
+++ b/spec/factories/clusters/platforms/kubernetes.rb
@@ -2,7 +2,7 @@
FactoryBot.define do
factory :cluster_platform_kubernetes, class: Clusters::Platforms::Kubernetes do
- cluster
+ association :cluster, platform_type: :kubernetes, provider_type: :user
namespace { nil }
api_url { 'https://kubernetes.example.com' }
token { 'a' * 40 }
diff --git a/spec/factories/clusters/providers/aws.rb b/spec/factories/clusters/providers/aws.rb
index f4bc61455c5..e4b10aa5f33 100644
--- a/spec/factories/clusters/providers/aws.rb
+++ b/spec/factories/clusters/providers/aws.rb
@@ -2,8 +2,7 @@
FactoryBot.define do
factory :cluster_provider_aws, class: Clusters::Providers::Aws do
- cluster
- created_by_user factory: :user
+ association :cluster, platform_type: :kubernetes, provider_type: :aws
role_arn { 'arn:aws:iam::123456789012:role/role-name' }
vpc_id { 'vpc-00000000000000000' }
diff --git a/spec/factories/clusters/providers/gcp.rb b/spec/factories/clusters/providers/gcp.rb
index 83b65dc8087..216c4d4fa31 100644
--- a/spec/factories/clusters/providers/gcp.rb
+++ b/spec/factories/clusters/providers/gcp.rb
@@ -2,7 +2,7 @@
FactoryBot.define do
factory :cluster_provider_gcp, class: Clusters::Providers::Gcp do
- cluster
+ association :cluster, platform_type: :kubernetes, provider_type: :gcp
gcp_project_id { 'test-gcp-project' }
trait :scheduled do
diff --git a/spec/factories/commit_statuses.rb b/spec/factories/commit_statuses.rb
index 3ce71a1b05d..5d635d93ff2 100644
--- a/spec/factories/commit_statuses.rb
+++ b/spec/factories/commit_statuses.rb
@@ -7,7 +7,7 @@ FactoryBot.define do
stage_idx { 0 }
status { 'success' }
description { 'commit status'}
- pipeline factory: :ci_pipeline_with_one_job
+ pipeline factory: :ci_pipeline
started_at { 'Tue, 26 Jan 2016 08:21:42 +0100'}
finished_at { 'Tue, 26 Jan 2016 08:23:42 +0100'}
diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb
index f4da206990c..f8738d28d83 100644
--- a/spec/factories/deployments.rb
+++ b/spec/factories/deployments.rb
@@ -51,6 +51,10 @@ FactoryBot.define do
finished_at { Time.now }
end
+ trait :created do
+ status { :created }
+ end
+
  # This trait hooks the state machine's events
trait :succeed do
after(:create) do |deployment, evaluator|
diff --git a/spec/factories/error_tracking/detailed_error.rb b/spec/factories/error_tracking/detailed_error.rb
new file mode 100644
index 00000000000..cf7de2ece96
--- /dev/null
+++ b/spec/factories/error_tracking/detailed_error.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :detailed_error_tracking_error, class: Gitlab::ErrorTracking::DetailedError do
+ id { 'id' }
+ title { 'title' }
+ type { 'error' }
+ user_count { 1 }
+ count { 2 }
+ first_seen { Time.now }
+ last_seen { Time.now }
+ message { 'message' }
+ culprit { 'culprit' }
+ external_url { 'http://example.com/id' }
+ external_base_url { 'http://example.com' }
+ project_id { 'project1' }
+ project_name { 'project name' }
+ project_slug { 'project_name' }
+ short_id { 'ID' }
+ status { 'unresolved' }
+ frequency { [] }
+ first_release_last_commit { '68c914da9' }
+ last_release_last_commit { '9ad419c86' }
+ first_release_short_version { 'abc123' }
+ last_release_short_version { 'abc123' }
+
+ skip_create
+ end
+end
diff --git a/spec/factories/error_tracking/error_event.rb b/spec/factories/error_tracking/error_event.rb
new file mode 100644
index 00000000000..44c127e7bf5
--- /dev/null
+++ b/spec/factories/error_tracking/error_event.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :error_tracking_error_event, class: Gitlab::ErrorTracking::ErrorEvent do
+ issue_id { 'id' }
+ date_received { Time.now.iso8601 }
+ stack_trace_entries do
+ {
+ 'stacktrace' =>
+ {
+ 'frames' => [{ 'file' => 'test.rb' }]
+ }
+ }
+ end
+
+ skip_create
+ end
+end
diff --git a/spec/factories/grafana_integrations.rb b/spec/factories/grafana_integrations.rb
index c19417f5a90..ae819ca828c 100644
--- a/spec/factories/grafana_integrations.rb
+++ b/spec/factories/grafana_integrations.rb
@@ -3,7 +3,8 @@
FactoryBot.define do
factory :grafana_integration, class: GrafanaIntegration do
project
- grafana_url { 'https://grafana.com' }
+ grafana_url { 'https://grafana.example.com' }
token { SecureRandom.hex(10) }
+ enabled { true }
end
end
diff --git a/spec/factories/group_group_links.rb b/spec/factories/group_group_links.rb
new file mode 100644
index 00000000000..0711a15b8dd
--- /dev/null
+++ b/spec/factories/group_group_links.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :group_group_link do
+ shared_group { create(:group) }
+ shared_with_group { create(:group) }
+ group_access { GroupMember::DEVELOPER }
+ end
+end
diff --git a/spec/factories/issues.rb b/spec/factories/issues.rb
index 46910078ee5..24c12a66599 100644
--- a/spec/factories/issues.rb
+++ b/spec/factories/issues.rb
@@ -6,6 +6,7 @@ FactoryBot.define do
project
author { project.creator }
updated_by { author }
+ relative_position { RelativePositioning::START_POSITION }
trait :confidential do
confidential { true }
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index d16e0c10671..42248dc1165 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -100,6 +100,7 @@ FactoryBot.define do
auto_merge_enabled { true }
auto_merge_strategy { AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS }
merge_user { author }
+ merge_params { { sha: diff_head_sha } }
end
trait :remove_source_branch do
@@ -120,6 +121,18 @@ FactoryBot.define do
end
end
+ trait :with_exposed_artifacts do
+ after(:build) do |merge_request|
+ merge_request.head_pipeline = build(
+ :ci_pipeline,
+ :success,
+ :with_exposed_artifacts,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+ end
+
trait :with_legacy_detached_merge_request_pipeline do
after(:create) do |merge_request|
merge_request.pipelines_for_merge_request << create(:ci_pipeline,
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 9477eeb18d4..2608f717f1c 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -234,10 +234,7 @@ FactoryBot.define do
trait :broken_repo do
after(:create) do |project|
- raise "Failed to create repository!" unless project.create_repository
-
- project.gitlab_shell.rm_directory(project.repository_storage,
- File.join("#{project.disk_path}.git", 'refs'))
+ TestEnv.rm_storage_dir(project.repository_storage, "#{project.disk_path}.git/refs")
end
end
diff --git a/spec/factories/zoom_meetings.rb b/spec/factories/zoom_meetings.rb
new file mode 100644
index 00000000000..b280deca012
--- /dev/null
+++ b/spec/factories/zoom_meetings.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :zoom_meeting do
+ project { issue.project }
+ issue
+ url { 'https://zoom.us/j/123456789' }
+ issue_status { :added }
+
+ trait :added_to_issue do
+ issue_status { :added }
+ end
+
+ trait :removed_from_issue do
+ issue_status { :removed }
+ end
+ end
+end
diff --git a/spec/features/admin/admin_abuse_reports_spec.rb b/spec/features/admin/admin_abuse_reports_spec.rb
index 48fff9e57d3..93051a8a355 100644
--- a/spec/features/admin/admin_abuse_reports_spec.rb
+++ b/spec/features/admin/admin_abuse_reports_spec.rb
@@ -51,5 +51,29 @@ describe "Admin::AbuseReports", :js do
end
end
end
+
+ describe 'filtering by user' do
+ let!(:user2) { create(:user) }
+ let!(:abuse_report) { create(:abuse_report, user: user) }
+ let!(:abuse_report_2) { create(:abuse_report, user: user2) }
+
+ it 'shows only single user report' do
+ visit admin_abuse_reports_path
+
+ page.within '.filter-form' do
+ click_button 'User'
+ wait_for_requests
+
+ page.within '.dropdown-menu-user' do
+ click_link user2.name
+ end
+
+ wait_for_requests
+ end
+
+ expect(page).to have_content(user2.name)
+ expect(page).not_to have_content(user.name)
+ end
+ end
end
end
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index 058e548208f..7c40ac5bde3 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -73,8 +73,9 @@ describe "Admin::Projects" do
before do
create(:group, name: 'Web')
- allow_any_instance_of(Projects::TransferService)
- .to receive(:move_uploads_to_new_namespace).and_return(true)
+ allow_next_instance_of(Projects::TransferService) do |instance|
+ allow(instance).to receive(:move_uploads_to_new_namespace).and_return(true)
+ end
end
it 'transfers project to group web', :js do
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index e1c9364067a..99a6165cfc9 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode do
include StubENV
include TermsHelper
+ include MobileHelpers
let(:admin) { create(:admin) }
@@ -450,6 +451,32 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
expect(page).to have_link(text: 'Support', href: new_support_url)
end
end
+
+ it 'Shows admin dashboard links on bigger screen' do
+ visit root_dashboard_path
+
+ page.within '.navbar' do
+ expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
+ expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ end
+ end
+
+ it 'Relocates admin dashboard links to dropdown list on smaller screen', :js do
+ resize_screen_xs
+ visit root_dashboard_path
+
+ page.within '.navbar' do
+ expect(page).not_to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
+ expect(page).not_to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ end
+
+ find('.header-more').click
+
+ page.within '.navbar' do
+ expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
+ expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ end
+ end
end
context 'when in admin_mode' do
@@ -462,7 +489,7 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
it 'can leave admin mode' do
page.within('.navbar-sub-nav') do
# Select first, link is also included in mobile view list
- click_on 'Leave admin mode', match: :first
+ click_on 'Leave Admin Mode', match: :first
expect(page).to have_link(href: new_admin_session_path)
end
@@ -481,7 +508,7 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
before do
page.within('.navbar-sub-nav') do
# Select first, link is also included in mobile view list
- click_on 'Leave admin mode', match: :first
+ click_on 'Leave Admin Mode', match: :first
end
end
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index 29f29e58917..0c8cd895c00 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -179,7 +179,9 @@ describe "Admin::Users" do
end
it "calls send mail" do
- expect_any_instance_of(NotificationService).to receive(:new_user)
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:new_user)
+ end
click_button "Create user"
end
diff --git a/spec/features/admin/clusters/eks_spec.rb b/spec/features/admin/clusters/eks_spec.rb
new file mode 100644
index 00000000000..b262db1ad7c
--- /dev/null
+++ b/spec/features/admin/clusters/eks_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Instance-level AWS EKS Cluster', :js do
+ let(:user) { create(:admin) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when user does not have a cluster and visits group clusters page' do
+ before do
+ visit admin_clusters_path
+
+ click_link 'Add Kubernetes cluster'
+ end
+
+ context 'when user creates a cluster on AWS EKS' do
+ before do
+ click_link 'Amazon EKS'
+ end
+
+ it 'user sees a form to create an EKS cluster' do
+ expect(page).to have_content('Create new Cluster on EKS')
+ end
+ end
+ end
+end
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index 235b6d0fd40..bac5c9f568e 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -134,11 +134,9 @@ describe 'Contributions Calendar', :js do
shared_examples 'a day with activity' do |contribution_count:|
include_context 'visit user page'
- it 'displays calendar activity square color for 1 contribution' do
+ it 'displays calendar activity square for 1 contribution', :sidekiq_might_not_need_inline do
expect(find('#js-overview')).to have_selector(get_cell_color_selector(contribution_count), count: 1)
- end
- it 'displays calendar activity square on the correct date' do
today = Date.today.strftime(date_format)
expect(find('#js-overview')).to have_selector(get_cell_date_selector(contribution_count, today), count: 1)
end
@@ -154,7 +152,7 @@ describe 'Contributions Calendar', :js do
describe 'issue title is shown on activity page' do
include_context 'visit user page'
- it 'displays calendar activity log' do
+ it 'displays calendar activity log', :sidekiq_might_not_need_inline do
expect(find('#js-overview .overview-content-list .event-target-title')).to have_content issue_title
end
end
@@ -186,11 +184,11 @@ describe 'Contributions Calendar', :js do
end
include_context 'visit user page'
- it 'displays calendar activity squares for both days' do
+ it 'displays calendar activity squares for both days', :sidekiq_might_not_need_inline do
expect(find('#js-overview')).to have_selector(get_cell_color_selector(1), count: 2)
end
- it 'displays calendar activity square for yesterday' do
+ it 'displays calendar activity square for yesterday', :sidekiq_might_not_need_inline do
yesterday = Date.yesterday.strftime(date_format)
expect(find('#js-overview')).to have_selector(get_cell_date_selector(1, yesterday), count: 1)
end
diff --git a/spec/features/clusters/installing_applications_shared_examples.rb b/spec/features/clusters/installing_applications_shared_examples.rb
index cb8fd8c607c..988cd228c1c 100644
--- a/spec/features/clusters/installing_applications_shared_examples.rb
+++ b/spec/features/clusters/installing_applications_shared_examples.rb
@@ -178,6 +178,37 @@ shared_examples "installing applications on a cluster" do
end
end
+ context 'when user installs Elastic Stack' do
+ before do
+ allow(ClusterInstallAppWorker).to receive(:perform_async)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
+
+ create(:clusters_applications_helm, :installed, cluster: cluster)
+ create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1', cluster: cluster)
+
+ page.within('.js-cluster-application-row-elastic_stack') do
+ click_button 'Install'
+ end
+ end
+
+ it 'shows status transition' do
+ page.within('.js-cluster-application-row-elastic_stack') do
+ expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing')
+
+ Clusters::Cluster.last.application_elastic_stack.make_installing!
+
+ expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing')
+
+ Clusters::Cluster.last.application_elastic_stack.make_installed!
+
+ expect(page).to have_css('.js-cluster-application-uninstall-button', exact_text: 'Uninstall')
+ end
+
+ expect(page).to have_content('Elastic Stack was successfully installed on your Kubernetes cluster')
+ end
+ end
+
context 'when user installs Ingress' do
before do
allow(ClusterInstallAppWorker).to receive(:perform_async)
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index 96d8da845cb..f538df89fd3 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -102,7 +102,7 @@ describe 'Commits' do
end
describe 'Cancel all builds' do
- it 'cancels commit', :js do
+ it 'cancels commit', :js, :sidekiq_might_not_need_inline do
visit pipeline_path(pipeline)
click_on 'Cancel running'
expect(page).to have_content 'canceled'
@@ -110,7 +110,7 @@ describe 'Commits' do
end
describe 'Cancel build' do
- it 'cancels build', :js do
+ it 'cancels build', :js, :sidekiq_might_not_need_inline do
visit pipeline_path(pipeline)
find('.js-btn-cancel-pipeline').click
expect(page).to have_content 'canceled'
@@ -157,39 +157,6 @@ describe 'Commits' do
end
end
end
-
- describe '.gitlab-ci.yml not found warning' do
- before do
- project.add_reporter(user)
- end
-
- context 'ci builds enabled' do
- it 'does not show warning' do
- visit pipeline_path(pipeline)
-
- expect(page).not_to have_content '.gitlab-ci.yml not found in this commit'
- end
-
- it 'shows warning' do
- stub_ci_pipeline_yaml_file(nil)
-
- visit pipeline_path(pipeline)
-
- expect(page).to have_content '.gitlab-ci.yml not found in this commit'
- end
- end
-
- context 'ci builds disabled' do
- it 'does not show warning' do
- stub_ci_builds_disabled
- stub_ci_pipeline_yaml_file(nil)
-
- visit pipeline_path(pipeline)
-
- expect(page).not_to have_content '.gitlab-ci.yml not found in this commit'
- end
- end
- end
end
context 'viewing commits for a branch' do
diff --git a/spec/features/container_registry_spec.rb b/spec/features/container_registry_spec.rb
index 03a2402a2d6..28b68e699e8 100644
--- a/spec/features/container_registry_spec.rb
+++ b/spec/features/container_registry_spec.rb
@@ -42,7 +42,7 @@ describe 'Container Registry', :js do
expect(page).to have_content('my/image')
end
- it 'user removes entire container repository' do
+ it 'user removes entire container repository', :sidekiq_might_not_need_inline do
visit_container_registry
expect_any_instance_of(ContainerRepository).to receive(:delete_tags!).and_return(true)
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index 07f0864fb3b..0fc4841ee0e 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -40,7 +40,9 @@ describe 'Cycle Analytics', :js do
context "when there's cycle analytics data" do
before do
- allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
+ allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
+ allow(instance).to receive(:issues).and_return([issue])
+ end
project.add_maintainer(user)
@build = create_cycle(user, project, issue, mr, milestone, pipeline)
@@ -56,7 +58,7 @@ describe 'Cycle Analytics', :js do
expect(deploys_counter).to have_content('1')
end
- it 'shows data on each stage' do
+ it 'shows data on each stage', :sidekiq_might_not_need_inline do
expect_issue_to_be_present
click_stage('Plan')
@@ -99,7 +101,9 @@ describe 'Cycle Analytics', :js do
project.add_developer(user)
project.add_guest(guest)
- allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
+ allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
+ allow(instance).to receive(:issues).and_return([issue])
+ end
create_cycle(user, project, issue, mr, milestone, pipeline)
deploy_master(user, project)
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 973d5a2dcfc..f10cdf6da1e 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -216,8 +216,7 @@ describe 'Dashboard Projects' do
expect(page).to have_selector('.merge-request-form')
expect(current_path).to eq project_new_merge_request_path(project)
expect(find('#merge_request_target_project_id', visible: false).value).to eq project.id.to_s
- expect(find('input#merge_request_source_branch', visible: false).value).to eq 'feature'
- expect(find('input#merge_request_target_branch', visible: false).value).to eq 'master'
+ expect(page).to have_content "From feature into master"
end
end
diff --git a/spec/features/explore/groups_spec.rb b/spec/features/explore/groups_spec.rb
index 81c77a29ecd..eff63d6a788 100644
--- a/spec/features/explore/groups_spec.rb
+++ b/spec/features/explore/groups_spec.rb
@@ -26,6 +26,10 @@ describe 'Explore Groups', :js do
end
end
+ before do
+ stub_feature_flags({ vue_issuables_list: { enabled: false, thing: group } })
+ end
+
shared_examples 'renders public and internal projects' do
it do
visit_page
diff --git a/spec/features/global_search_spec.rb b/spec/features/global_search_spec.rb
index 00fa85930b1..c499fac6bc0 100644
--- a/spec/features/global_search_spec.rb
+++ b/spec/features/global_search_spec.rb
@@ -21,7 +21,9 @@ describe 'Global search' do
describe 'I search through the issues and I see pagination' do
before do
- allow_any_instance_of(Gitlab::SearchResults).to receive(:per_page).and_return(1)
+ allow_next_instance_of(Gitlab::SearchResults) do |instance|
+ allow(instance).to receive(:per_page).and_return(1)
+ end
create_list(:issue, 2, project: project, title: 'initial')
end
diff --git a/spec/features/groups/clusters/eks_spec.rb b/spec/features/groups/clusters/eks_spec.rb
new file mode 100644
index 00000000000..b6942304c22
--- /dev/null
+++ b/spec/features/groups/clusters/eks_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Group AWS EKS Cluster', :js do
+ let(:group) { create(:group) }
+ let(:user) { create(:user) }
+
+ before do
+ group.add_maintainer(user)
+ gitlab_sign_in(user)
+
+ allow(Groups::ClustersController).to receive(:STATUS_POLLING_INTERVAL) { 100 }
+ allow_any_instance_of(Clusters::Kubernetes::CreateOrUpdateNamespaceService).to receive(:execute)
+ allow_any_instance_of(Clusters::Cluster).to receive(:retrieve_connection_status).and_return(:connected)
+ end
+
+ context 'when user does not have a cluster and visits group clusters page' do
+ before do
+ visit group_clusters_path(group)
+
+ click_link 'Add Kubernetes cluster'
+ end
+
+ context 'when user creates a cluster on AWS EKS' do
+ before do
+ click_link 'Amazon EKS'
+ end
+
+ it 'user sees a form to create an EKS cluster' do
+ expect(page).to have_content('Create new Cluster on EKS')
+ end
+ end
+ end
+end
diff --git a/spec/features/groups/clusters/user_spec.rb b/spec/features/groups/clusters/user_spec.rb
index 8891866c1f8..e06f2efe183 100644
--- a/spec/features/groups/clusters/user_spec.rb
+++ b/spec/features/groups/clusters/user_spec.rb
@@ -13,8 +13,12 @@ describe 'User Cluster', :js do
gitlab_sign_in(user)
allow(Groups::ClustersController).to receive(:STATUS_POLLING_INTERVAL) { 100 }
- allow_any_instance_of(Clusters::Kubernetes::CreateOrUpdateNamespaceService).to receive(:execute)
- allow_any_instance_of(Clusters::Cluster).to receive(:retrieve_connection_status).and_return(:connected)
+ allow_next_instance_of(Clusters::Kubernetes::CreateOrUpdateNamespaceService) do |instance|
+ allow(instance).to receive(:execute)
+ end
+ allow_next_instance_of(Clusters::Cluster) do |instance|
+ allow(instance).to receive(:retrieve_connection_status).and_return(:connected)
+ end
end
context 'when user does not have a cluster and visits cluster index page' do
diff --git a/spec/features/groups/group_page_with_external_authorization_service_spec.rb b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
index c05c3f4f3d6..823c8cc8fad 100644
--- a/spec/features/groups/group_page_with_external_authorization_service_spec.rb
+++ b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
@@ -15,7 +15,7 @@ describe 'The group page' do
def expect_all_sidebar_links
within('.nav-sidebar') do
- expect(page).to have_link('Overview')
+ expect(page).to have_link('Group overview')
expect(page).to have_link('Details')
expect(page).to have_link('Activity')
expect(page).to have_link('Issues')
@@ -44,7 +44,7 @@ describe 'The group page' do
visit group_path(group)
within('.nav-sidebar') do
- expect(page).to have_link('Overview')
+ expect(page).to have_link('Group overview')
expect(page).to have_link('Details')
expect(page).not_to have_link('Activity')
expect(page).not_to have_link('Contribution Analytics')
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index 5d87c9d7be8..b9b233026fd 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -11,6 +11,10 @@ describe 'Group issues page' do
let(:project_with_issues_disabled) { create(:project, :issues_disabled, group: group) }
let(:path) { issues_group_path(group) }
+ before do
+ stub_feature_flags({ vue_issuables_list: { enabled: false, thing: group } })
+ end
+
context 'with shared examples' do
let(:issuable) { create(:issue, project: project, title: "this is my created issuable")}
diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb
index 17738905e8d..65ef0af5be3 100644
--- a/spec/features/groups/milestone_spec.rb
+++ b/spec/features/groups/milestone_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe 'Group milestones' do
- let(:group) { create(:group) }
- let!(:project) { create(:project_empty_repo, group: group) }
- let(:user) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project_empty_repo, group: group) }
+ let_it_be(:user) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
around do |example|
Timecop.freeze { example.run }
@@ -71,9 +71,9 @@ describe 'Group milestones' do
end
context 'when milestones exists' do
- let!(:other_project) { create(:project_empty_repo, group: group) }
+ let_it_be(:other_project) { create(:project_empty_repo, group: group) }
- let!(:active_project_milestone1) do
+ let_it_be(:active_project_milestone1) do
create(
:milestone,
project: project,
@@ -83,12 +83,12 @@ describe 'Group milestones' do
description: 'Lorem Ipsum is simply dummy text'
)
end
- let!(:active_project_milestone2) { create(:milestone, project: other_project, state: 'active', title: 'v1.1') }
- let!(:closed_project_milestone1) { create(:milestone, project: project, state: 'closed', title: 'v2.0') }
- let!(:closed_project_milestone2) { create(:milestone, project: other_project, state: 'closed', title: 'v2.0') }
- let!(:active_group_milestone) { create(:milestone, group: group, state: 'active', title: 'GL-113') }
- let!(:closed_group_milestone) { create(:milestone, group: group, state: 'closed') }
- let!(:issue) do
+ let_it_be(:active_project_milestone2) { create(:milestone, project: other_project, state: 'active', title: 'v1.1') }
+ let_it_be(:closed_project_milestone1) { create(:milestone, project: project, state: 'closed', title: 'v2.0') }
+ let_it_be(:closed_project_milestone2) { create(:milestone, project: other_project, state: 'closed', title: 'v2.0') }
+ let_it_be(:active_group_milestone) { create(:milestone, group: group, state: 'active', title: 'GL-113') }
+ let_it_be(:closed_group_milestone) { create(:milestone, group: group, state: 'closed') }
+ let_it_be(:issue) do
create :issue, project: project, assignees: [user], author: user, milestone: active_project_milestone1
end
@@ -143,38 +143,111 @@ describe 'Group milestones' do
expect(page).to have_content('Issues 1 Open: 1 Closed: 0')
expect(page).to have_link(issue.title, href: project_issue_path(issue.project, issue))
end
+ end
+ end
+
+ describe 'milestone tabs', :js do
+ context 'for a legacy group milestone' do
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:issue) { create(:labeled_issue, project: project, milestone: milestone, labels: [label], assignees: [create(:user)]) }
+ let_it_be(:mr) { create(:merge_request, source_project: project, milestone: milestone) }
+
+ before do
+ visit group_milestone_path(group, milestone.title, title: milestone.title)
+ end
+
+ it 'renders the issues tab' do
+ within('#tab-issues') do
+ expect(page).to have_content issue.title
+ end
+ end
+
+ it 'renders the merge requests tab' do
+ within('.js-milestone-tabs') do
+ click_link('Merge Requests')
+ end
- describe 'labels' do
- before do
- create(:label, project: project, title: 'bug') do |label|
- issue.labels << label
- end
+ within('#tab-merge-requests') do
+ expect(page).to have_content mr.title
+ end
+ end
+
+ it 'renders the participants tab' do
+ within('.js-milestone-tabs') do
+ click_link('Participants')
+ end
- create(:label, project: project, title: 'feature') do |label|
- issue.labels << label
- end
+ within('#tab-participants') do
+ expect(page).to have_content issue.assignees.first.name
end
+ end
- it 'renders labels' do
- click_link 'v1.0'
+ it 'renders the labels tab' do
+ within('.js-milestone-tabs') do
+ click_link('Labels')
+ end
- page.within('#tab-issues') do
- expect(page).to have_content 'bug'
- expect(page).to have_content 'feature'
- end
+ within('#tab-labels') do
+ expect(page).to have_content label.title
end
+ end
+ end
+
+ context 'for a group milestone' do
+ let_it_be(:other_project) { create(:project_empty_repo, group: group) }
+ let_it_be(:milestone) { create(:milestone, group: group) }
- it 'renders labels list', :js do
- click_link 'v1.0'
+ let_it_be(:project_label) { create(:label, project: project) }
+ let_it_be(:other_project_label) { create(:label, project: other_project) }
- page.within('.content .nav-links') do
- page.find(:xpath, "//a[@href='#tab-labels']").click
- end
+ let_it_be(:project_issue) { create(:labeled_issue, project: project, milestone: milestone, labels: [project_label], assignees: [create(:user)]) }
+ let_it_be(:other_project_issue) { create(:labeled_issue, project: other_project, milestone: milestone, labels: [other_project_label], assignees: [create(:user)]) }
+
+ let_it_be(:project_mr) { create(:merge_request, source_project: project, milestone: milestone) }
+ let_it_be(:other_project_mr) { create(:merge_request, source_project: other_project, milestone: milestone) }
+
+ before do
+ visit group_milestone_path(group, milestone)
+ end
+
+ it 'renders the issues tab' do
+ within('#tab-issues') do
+ expect(page).to have_content project_issue.title
+ expect(page).to have_content other_project_issue.title
+ end
+ end
+
+ it 'renders the merge requests tab' do
+ within('.js-milestone-tabs') do
+ click_link('Merge Requests')
+ end
+
+ within('#tab-merge-requests') do
+ expect(page).to have_content project_mr.title
+ expect(page).to have_content other_project_mr.title
+ end
+ end
+
+ it 'renders the participants tab' do
+ within('.js-milestone-tabs') do
+ click_link('Participants')
+ end
+
+ within('#tab-participants') do
+ expect(page).to have_content project_issue.assignees.first.name
+ expect(page).to have_content other_project_issue.assignees.first.name
+ end
+ end
+
+ it 'renders the labels tab' do
+ within('.js-milestone-tabs') do
+ click_link('Labels')
+ end
- page.within('#tab-labels') do
- expect(page).to have_content 'bug'
- expect(page).to have_content 'feature'
- end
+ within('#tab-labels') do
+ expect(page).to have_content project_label.title
+ expect(page).to have_content other_project_label.title
end
end
end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index ca994c95df8..e958ebb1275 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -189,7 +189,7 @@ describe 'Group' do
expect(page).to have_selector '#confirm_name_input:focus'
end
- it 'removes group' do
+ it 'removes group', :sidekiq_might_not_need_inline do
expect { remove_with_confirm('Remove group', group.path) }.to change {Group.count}.by(-1)
expect(group.members.all.count).to be_zero
expect(page).to have_content "scheduled for deletion"
@@ -237,14 +237,28 @@ describe 'Group' do
let!(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
let!(:project) { create(:project, namespace: group) }
- let!(:path) { group_path(group) }
it 'renders projects and groups on the page' do
- visit path
+ visit group_path(group)
wait_for_requests
expect(page).to have_content(nested_group.name)
expect(page).to have_content(project.name)
+ expect(page).to have_link('Group overview')
+ end
+
+ it 'renders subgroup page with the text "Subgroup overview"' do
+ visit group_path(nested_group)
+ wait_for_requests
+
+ expect(page).to have_link('Subgroup overview')
+ end
+
+ it 'renders project page with the text "Project overview"' do
+ visit project_path(project)
+ wait_for_requests
+
+ expect(page).to have_link('Project overview')
end
end
diff --git a/spec/features/import/manifest_import_spec.rb b/spec/features/import/manifest_import_spec.rb
index e9471257544..89bf69dea7d 100644
--- a/spec/features/import/manifest_import_spec.rb
+++ b/spec/features/import/manifest_import_spec.rb
@@ -24,7 +24,7 @@ describe 'Import multiple repositories by uploading a manifest file', :js do
expect(page).to have_content('https://android-review.googlesource.com/platform/build/blueprint')
end
- it 'imports successfully imports a project' do
+ it 'imports successfully imports a project', :sidekiq_might_not_need_inline do
visit new_import_manifest_path
attach_file('manifest', Rails.root.join('spec/fixtures/aosp_manifest.xml'))
diff --git a/spec/features/issuables/markdown_references/internal_references_spec.rb b/spec/features/issuables/markdown_references/internal_references_spec.rb
index f3b534bca49..efd84cf67b0 100644
--- a/spec/features/issuables/markdown_references/internal_references_spec.rb
+++ b/spec/features/issuables/markdown_references/internal_references_spec.rb
@@ -64,7 +64,7 @@ describe "Internal references", :js do
visit(project_issue_path(public_project, public_project_issue))
end
- it "shows references" do
+ it "shows references", :sidekiq_might_not_need_inline do
page.within("#merge-requests .merge-requests-title") do
expect(page).to have_content("Related merge requests")
expect(page).to have_css(".mr-count-badge")
@@ -133,7 +133,7 @@ describe "Internal references", :js do
visit(project_merge_request_path(public_project, public_project_merge_request))
end
- it "shows references" do
+ it "shows references", :sidekiq_might_not_need_inline do
expect(page).to have_content("mentioned in merge request #{private_project_merge_request.to_reference(public_project)}")
.and have_content(private_project_user.name)
end
diff --git a/spec/features/issuables/markdown_references/jira_spec.rb b/spec/features/issuables/markdown_references/jira_spec.rb
index 8085918f533..c5818691b3c 100644
--- a/spec/features/issuables/markdown_references/jira_spec.rb
+++ b/spec/features/issuables/markdown_references/jira_spec.rb
@@ -17,7 +17,9 @@ describe "Jira", :js do
stub_request(:get, "https://jira.example.com/rest/api/2/issue/JIRA-5")
stub_request(:post, "https://jira.example.com/rest/api/2/issue/JIRA-5/comment")
- allow_any_instance_of(JIRA::Resource::Issue).to receive(:remotelink).and_return(remotelink)
+ allow_next_instance_of(JIRA::Resource::Issue) do |instance|
+ allow(instance).to receive(:remotelink).and_return(remotelink)
+ end
sign_in(user)
@@ -46,7 +48,7 @@ describe "Jira", :js do
end
end
- it "creates a note on the referenced issues" do
+ it "creates a note on the referenced issues", :sidekiq_might_not_need_inline do
click_button("Comment")
wait_for_requests
diff --git a/spec/features/issuables/sorting_list_spec.rb b/spec/features/issuables/sorting_list_spec.rb
index b4531f5da4e..b7813c8ba30 100644
--- a/spec/features/issuables/sorting_list_spec.rb
+++ b/spec/features/issuables/sorting_list_spec.rb
@@ -57,7 +57,7 @@ describe 'Sort Issuable List' do
it 'is "last updated"' do
visit_merge_requests_with_state(project, 'merged')
- expect(find('.issues-other-filters')).to have_content('Last updated')
+ expect(find('.filter-dropdown-container')).to have_content('Last updated')
expect(first_merge_request).to include(last_updated_issuable.title)
expect(last_merge_request).to include(first_updated_issuable.title)
end
@@ -69,7 +69,7 @@ describe 'Sort Issuable List' do
it 'is "last updated"' do
visit_merge_requests_with_state(project, 'closed')
- expect(find('.issues-other-filters')).to have_content('Last updated')
+ expect(find('.filter-dropdown-container')).to have_content('Last updated')
expect(first_merge_request).to include(last_updated_issuable.title)
expect(last_merge_request).to include(first_updated_issuable.title)
end
@@ -81,7 +81,7 @@ describe 'Sort Issuable List' do
it 'is "created date"' do
visit_merge_requests_with_state(project, 'all')
- expect(find('.issues-other-filters')).to have_content('Created date')
+ expect(find('.filter-dropdown-container')).to have_content('Created date')
expect(first_merge_request).to include(last_created_issuable.title)
expect(last_merge_request).to include(first_created_issuable.title)
end
@@ -94,7 +94,7 @@ describe 'Sort Issuable List' do
it 'supports sorting in asc and desc order' do
visit_merge_requests_with_state(project, 'open')
- page.within('.issues-other-filters') do
+ page.within('.filter-dropdown-container') do
click_button('Created date')
click_link('Last updated')
end
@@ -102,7 +102,7 @@ describe 'Sort Issuable List' do
expect(first_merge_request).to include(last_updated_issuable.title)
expect(last_merge_request).to include(first_updated_issuable.title)
- find('.issues-other-filters .filter-dropdown-container .rspec-reverse-sort').click
+ find('.filter-dropdown-container .rspec-reverse-sort').click
expect(first_merge_request).to include(first_updated_issuable.title)
expect(last_merge_request).to include(last_updated_issuable.title)
@@ -133,7 +133,7 @@ describe 'Sort Issuable List' do
it 'is "created date"' do
visit_issues project
- expect(find('.issues-other-filters')).to have_content('Created date')
+ expect(find('.filter-dropdown-container')).to have_content('Created date')
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
@@ -145,7 +145,7 @@ describe 'Sort Issuable List' do
it 'is "created date"' do
visit_issues_with_state(project, 'open')
- expect(find('.issues-other-filters')).to have_content('Created date')
+ expect(find('.filter-dropdown-container')).to have_content('Created date')
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
@@ -157,7 +157,7 @@ describe 'Sort Issuable List' do
it 'is "last updated"' do
visit_issues_with_state(project, 'closed')
- expect(find('.issues-other-filters')).to have_content('Last updated')
+ expect(find('.filter-dropdown-container')).to have_content('Last updated')
expect(first_issue).to include(last_updated_issuable.title)
expect(last_issue).to include(first_updated_issuable.title)
end
@@ -169,7 +169,7 @@ describe 'Sort Issuable List' do
it 'is "created date"' do
visit_issues_with_state(project, 'all')
- expect(find('.issues-other-filters')).to have_content('Created date')
+ expect(find('.filter-dropdown-container')).to have_content('Created date')
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
@@ -183,7 +183,7 @@ describe 'Sort Issuable List' do
end
it 'shows the sort order as created date' do
- expect(find('.issues-other-filters')).to have_content('Created date')
+ expect(find('.filter-dropdown-container')).to have_content('Created date')
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
@@ -196,7 +196,7 @@ describe 'Sort Issuable List' do
it 'supports sorting in asc and desc order' do
visit_issues_with_state(project, 'open')
- page.within('.issues-other-filters') do
+ page.within('.filter-dropdown-container') do
click_button('Created date')
click_link('Last updated')
end
@@ -204,7 +204,7 @@ describe 'Sort Issuable List' do
expect(first_issue).to include(last_updated_issuable.title)
expect(last_issue).to include(first_updated_issuable.title)
- find('.issues-other-filters .filter-dropdown-container .rspec-reverse-sort').click
+ find('.filter-dropdown-container .rspec-reverse-sort').click
expect(first_issue).to include(first_updated_issuable.title)
expect(last_issue).to include(last_updated_issuable.title)
diff --git a/spec/features/issues/filtered_search/dropdown_hint_spec.rb b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
index 1c56902a27d..bb57d69148b 100644
--- a/spec/features/issues/filtered_search/dropdown_hint_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
@@ -68,7 +68,7 @@ describe 'Dropdown hint', :js do
it 'filters with text' do
filtered_search.set('a')
- expect(find(js_dropdown_hint)).to have_selector('.filter-dropdown .filter-dropdown-item', count: 5)
+ expect(find(js_dropdown_hint)).to have_selector('.filter-dropdown .filter-dropdown-item', count: 6)
end
end
@@ -104,6 +104,15 @@ describe 'Dropdown hint', :js do
expect_filtered_search_input_empty
end
+ it 'opens the release dropdown when you click on release' do
+ click_hint('release')
+
+ expect(page).to have_css(js_dropdown_hint, visible: false)
+ expect(page).to have_css('#js-dropdown-release', visible: true)
+ expect_tokens([{ name: 'Release' }])
+ expect_filtered_search_input_empty
+ end
+
it 'opens the label dropdown when you click on label' do
click_hint('label')
diff --git a/spec/features/issues/filtered_search/dropdown_release_spec.rb b/spec/features/issues/filtered_search/dropdown_release_spec.rb
new file mode 100644
index 00000000000..eea7f2d7848
--- /dev/null
+++ b/spec/features/issues/filtered_search/dropdown_release_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Dropdown release', :js do
+ include FilteredSearchHelpers
+
+ let!(:project) { create(:project, :repository) }
+ let!(:user) { create(:user) }
+ let!(:release) { create(:release, tag: 'v1.0', project: project) }
+ let!(:crazy_release) { create(:release, tag: '☺!/"#%&\'{}+,-.<>;=@]_`{|}🚀', project: project) }
+
+ def filtered_search
+ find('.filtered-search')
+ end
+
+ def filter_dropdown
+ find('#js-dropdown-release .filter-dropdown')
+ end
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ create(:issue, project: project)
+
+ visit project_issues_path(project)
+ end
+
+ describe 'behavior' do
+ before do
+ filtered_search.set('release:')
+ end
+
+ def expect_results(count)
+ expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: count)
+ end
+
+ it 'loads all the releases when opened' do
+ expect_results(2)
+ end
+
+ it 'filters by tag name' do
+ filtered_search.send_keys("☺")
+ expect_results(1)
+ end
+
+ it 'fills in the release name when the autocomplete hint is clicked' do
+ find('#js-dropdown-release .filter-dropdown-item', text: crazy_release.tag).click
+
+ expect(page).to have_css('#js-dropdown-release', visible: false)
+ expect_tokens([release_token(crazy_release.tag)])
+ expect_filtered_search_input_empty
+ end
+ end
+end
diff --git a/spec/features/issues/notes_on_issues_spec.rb b/spec/features/issues/notes_on_issues_spec.rb
index 5247baa58a1..74eb699c7ef 100644
--- a/spec/features/issues/notes_on_issues_spec.rb
+++ b/spec/features/issues/notes_on_issues_spec.rb
@@ -23,7 +23,7 @@ describe 'Create notes on issues', :js do
submit_comment(note_text)
end
- it 'creates a note with reference and cross references the issue' do
+ it 'creates a note with reference and cross references the issue', :sidekiq_might_not_need_inline do
page.within('div#notes li.note div.note-text') do
expect(page).to have_content(note_text)
expect(page.find('a')).to have_content(mention.to_reference)
diff --git a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
index be31c45b373..8322a6afa04 100644
--- a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
+++ b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
@@ -67,7 +67,7 @@ describe 'User creates branch and merge request on issue page', :js do
end
context 'when branch name is auto-generated' do
- it 'creates a merge request' do
+ it 'creates a merge request', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
select_dropdown_option('create-mr')
@@ -96,7 +96,7 @@ describe 'User creates branch and merge request on issue page', :js do
context 'when branch name is custom' do
let(:branch_name) { 'custom-branch-name' }
- it 'creates a merge request' do
+ it 'creates a merge request', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
select_dropdown_option('create-mr', branch_name)
diff --git a/spec/features/issues/user_creates_confidential_merge_request_spec.rb b/spec/features/issues/user_creates_confidential_merge_request_spec.rb
index 24089bdeb81..838c0a6349c 100644
--- a/spec/features/issues/user_creates_confidential_merge_request_spec.rb
+++ b/spec/features/issues/user_creates_confidential_merge_request_spec.rb
@@ -42,7 +42,7 @@ describe 'User creates confidential merge request on issue page', :js do
visit_confidential_issue
end
- it 'create merge request in fork' do
+ it 'create merge request in fork', :sidekiq_might_not_need_inline do
click_button 'Create confidential merge request'
page.within '.create-confidential-merge-request-dropdown-menu' do
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index a71395c0e47..39ce3415727 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -92,19 +92,6 @@ describe "User creates issue" do
.and have_content(label_titles.first)
end
end
-
- context "with Zoom link" do
- it "adds Zoom button" do
- issue_title = "Issue containing Zoom meeting link"
- zoom_url = "https://gitlab.zoom.us/j/123456789"
-
- fill_in("Title", with: issue_title)
- fill_in("Description", with: zoom_url)
- click_button("Submit issue")
-
- expect(page).to have_link('Join Zoom meeting', href: zoom_url)
- end
- end
end
context "when signed in as user with special characters in their name" do
diff --git a/spec/features/issues/user_toggles_subscription_spec.rb b/spec/features/issues/user_toggles_subscription_spec.rb
index 165d41950da..ba167362511 100644
--- a/spec/features/issues/user_toggles_subscription_spec.rb
+++ b/spec/features/issues/user_toggles_subscription_spec.rb
@@ -33,7 +33,6 @@ describe "User toggles subscription", :js do
it 'is disabled' do
expect(page).to have_content('Notifications have been disabled by the project or group owner')
- expect(page).to have_selector('.js-emails-disabled', visible: true)
expect(page).not_to have_selector('.js-issuable-subscribe-button')
end
end
diff --git a/spec/features/markdown/metrics_spec.rb b/spec/features/markdown/metrics_spec.rb
index 4de67cfcdbe..e7fec41fae3 100644
--- a/spec/features/markdown/metrics_spec.rb
+++ b/spec/features/markdown/metrics_spec.rb
@@ -2,8 +2,9 @@
require 'spec_helper'
-describe 'Metrics rendering', :js, :use_clean_rails_memory_store_caching do
+describe 'Metrics rendering', :js, :use_clean_rails_memory_store_caching, :sidekiq_might_not_need_inline do
include PrometheusHelpers
+ include GrafanaApiHelpers
let(:user) { create(:user) }
let(:project) { create(:prometheus_project) }
@@ -14,11 +15,7 @@ describe 'Metrics rendering', :js, :use_clean_rails_memory_store_caching do
before do
configure_host
- import_common_metrics
- stub_any_prometheus_request_with_response
-
project.add_developer(user)
-
sign_in(user)
end
@@ -26,31 +23,58 @@ describe 'Metrics rendering', :js, :use_clean_rails_memory_store_caching do
restore_host
end
- it 'shows embedded metrics' do
- visit project_issue_path(project, issue)
+ context 'internal metrics embeds' do
+ before do
+ import_common_metrics
+ stub_any_prometheus_request_with_response
+ end
+
+ it 'shows embedded metrics' do
+ visit project_issue_path(project, issue)
+
+ expect(page).to have_css('div.prometheus-graph')
+ expect(page).to have_text('Memory Usage (Total)')
+ expect(page).to have_text('Core Usage (Total)')
+ end
+
+    context 'when dashboard params are included in the url' do
+ let(:metrics_url) { metrics_project_environment_url(project, environment, **chart_params) }
- expect(page).to have_css('div.prometheus-graph')
- expect(page).to have_text('Memory Usage (Total)')
- expect(page).to have_text('Core Usage (Total)')
+ let(:chart_params) do
+ {
+ group: 'System metrics (Kubernetes)',
+ title: 'Memory Usage (Pod average)',
+ y_label: 'Memory Used per Pod (MB)'
+ }
+ end
+
+ it 'shows embedded metrics for the specific chart' do
+ visit project_issue_path(project, issue)
+
+ expect(page).to have_css('div.prometheus-graph')
+ expect(page).to have_text(chart_params[:title])
+ expect(page).to have_text(chart_params[:y_label])
+ end
+ end
end
- context 'when dashboard params are in included the url' do
- let(:metrics_url) { metrics_project_environment_url(project, environment, **chart_params) }
+ context 'grafana metrics embeds' do
+ let(:grafana_integration) { create(:grafana_integration, project: project) }
+ let(:grafana_base_url) { grafana_integration.grafana_url }
+ let(:metrics_url) { valid_grafana_dashboard_link(grafana_base_url) }
- let(:chart_params) do
- {
- group: 'System metrics (Kubernetes)',
- title: 'Memory Usage (Pod average)',
- y_label: 'Memory Used per Pod (MB)'
- }
+ before do
+ stub_dashboard_request(grafana_base_url)
+ stub_datasource_request(grafana_base_url)
+ stub_all_grafana_proxy_requests(grafana_base_url)
end
- it 'shows embedded metrics for the specifiec chart' do
+ it 'shows embedded metrics' do
visit project_issue_path(project, issue)
expect(page).to have_css('div.prometheus-graph')
- expect(page).to have_text(chart_params[:title])
- expect(page).to have_text(chart_params[:y_label])
+ expect(page).to have_text('Expired / Evicted')
+ expect(page).to have_text('expired - test-attribute-value')
end
end
diff --git a/spec/features/merge_request/maintainer_edits_fork_spec.rb b/spec/features/merge_request/maintainer_edits_fork_spec.rb
index 030638cba71..4e161d530d3 100644
--- a/spec/features/merge_request/maintainer_edits_fork_spec.rb
+++ b/spec/features/merge_request/maintainer_edits_fork_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'a maintainer edits files on a source-branch of an MR from a fork', :js do
+describe 'a maintainer edits files on a source-branch of an MR from a fork', :js, :sidekiq_might_not_need_inline do
include ProjectForksHelper
let(:user) { create(:user, username: 'the-maintainer') }
let(:target_project) { create(:project, :public, :repository) }
@@ -20,7 +20,7 @@ describe 'a maintainer edits files on a source-branch of an MR from a fork', :js
end
before do
- stub_feature_flags(web_ide_default: false)
+ stub_feature_flags(web_ide_default: false, single_mr_diff_view: false)
target_project.add_maintainer(user)
sign_in(user)
@@ -32,6 +32,8 @@ describe 'a maintainer edits files on a source-branch of an MR from a fork', :js
wait_for_requests
end
+ it_behaves_like 'rendering a single diff version'
+
it 'mentions commits will go to the source branch' do
expect(page).to have_content('Your changes can be committed to fix because a merge request is open.')
end
diff --git a/spec/features/merge_request/user_accepts_merge_request_spec.rb b/spec/features/merge_request/user_accepts_merge_request_spec.rb
index 4d305d43351..5e1ff232b80 100644
--- a/spec/features/merge_request/user_accepts_merge_request_spec.rb
+++ b/spec/features/merge_request/user_accepts_merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'User accepts a merge request', :js do
+describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inline do
let(:merge_request) { create(:merge_request, :with_diffs, :simple, source_project: project) }
let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb b/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb
index be403abcc4d..0ecd32565d0 100644
--- a/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb
+++ b/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb
@@ -23,7 +23,7 @@ describe 'create a merge request, allowing commits from members who can merge to
sign_in(user)
end
- it 'allows setting possible' do
+ it 'allows setting possible', :sidekiq_might_not_need_inline do
visit_new_merge_request
check 'Allow commits from members who can merge to the target branch'
@@ -35,7 +35,7 @@ describe 'create a merge request, allowing commits from members who can merge to
expect(page).to have_content('Allows commits from members who can merge to the target branch')
end
- it 'shows a message when one of the projects is private' do
+ it 'shows a message when one of the projects is private', :sidekiq_might_not_need_inline do
source_project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
visit_new_merge_request
@@ -43,7 +43,7 @@ describe 'create a merge request, allowing commits from members who can merge to
expect(page).to have_content('Not available for private projects')
end
- it 'shows a message when the source branch is protected' do
+ it 'shows a message when the source branch is protected', :sidekiq_might_not_need_inline do
create(:protected_branch, project: source_project, name: 'fix')
visit_new_merge_request
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index 19b8a7f74b7..6a23b6cdf60 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -13,12 +13,15 @@ describe 'User comments on a diff', :js do
let(:user) { create(:user) }
before do
+ stub_feature_flags(single_mr_diff_view: false)
project.add_maintainer(user)
sign_in(user)
visit(diffs_project_merge_request_path(project, merge_request))
end
+ it_behaves_like 'rendering a single diff version'
+
context 'when viewing comments' do
context 'when toggling inline comments' do
context 'in a single file' do
diff --git a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
index e0724a04ea3..e6634a8ff39 100644
--- a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
@@ -9,6 +9,7 @@ describe 'Merge request > User creates image diff notes', :js do
let(:user) { project.creator }
before do
+ stub_feature_flags(single_mr_diff_view: false)
sign_in(user)
# Stub helper to return any blob file as image from public app folder.
@@ -17,6 +18,8 @@ describe 'Merge request > User creates image diff notes', :js do
allow_any_instance_of(DiffHelper).to receive(:diff_file_old_blob_raw_url).and_return('/favicon.png')
end
+ it_behaves_like 'rendering a single diff version'
+
context 'create commit diff notes' do
commit_id = '2f63565e7aac07bcdadb654e253078b727143ec4'
diff --git a/spec/features/merge_request/user_creates_merge_request_spec.rb b/spec/features/merge_request/user_creates_merge_request_spec.rb
index f92791cc810..67f6d8ebe32 100644
--- a/spec/features/merge_request/user_creates_merge_request_spec.rb
+++ b/spec/features/merge_request/user_creates_merge_request_spec.rb
@@ -25,6 +25,11 @@ describe "User creates a merge request", :js do
click_button("Compare branches")
+ page.within('.merge-request-form') do
+ expect(page.find('#merge_request_title')['placeholder']).to eq 'Title'
+ expect(page.find('#merge_request_description')['placeholder']).to eq 'Describe the goal of the changes and what reviewers should be aware of.'
+ end
+
fill_in("Title", with: title)
click_button("Submit merge request")
@@ -36,7 +41,7 @@ describe "User creates a merge request", :js do
context "to a forked project" do
let(:forked_project) { fork_project(project, user, namespace: user.namespace, repository: true) }
- it "creates a merge request" do
+ it "creates a merge request", :sidekiq_might_not_need_inline do
visit(project_new_merge_request_path(forked_project))
expect(page).to have_content("Source branch").and have_content("Target branch")
diff --git a/spec/features/merge_request/user_edits_merge_request_spec.rb b/spec/features/merge_request/user_edits_merge_request_spec.rb
index 81c56855961..821db8a1d5b 100644
--- a/spec/features/merge_request/user_edits_merge_request_spec.rb
+++ b/spec/features/merge_request/user_edits_merge_request_spec.rb
@@ -17,7 +17,7 @@ describe 'User edits a merge request', :js do
end
it 'changes the target branch' do
- expect(page).to have_content('Target branch')
+ expect(page).to have_content('From master into feature')
select2('merge-test', from: '#merge_request_target_branch')
click_button('Save changes')
diff --git a/spec/features/merge_request/user_expands_diff_spec.rb b/spec/features/merge_request/user_expands_diff_spec.rb
index f7317ec5ca7..ba7abd3af2c 100644
--- a/spec/features/merge_request/user_expands_diff_spec.rb
+++ b/spec/features/merge_request/user_expands_diff_spec.rb
@@ -7,6 +7,8 @@ describe 'User expands diff', :js do
let(:merge_request) { create(:merge_request, source_branch: 'expand-collapse-files', source_project: project, target_project: project) }
before do
+ stub_feature_flags(single_mr_diff_view: false)
+
allow(Gitlab::Git::Diff).to receive(:size_limit).and_return(100.kilobytes)
allow(Gitlab::Git::Diff).to receive(:collapse_limit).and_return(10.kilobytes)
@@ -15,6 +17,8 @@ describe 'User expands diff', :js do
wait_for_requests
end
+ it_behaves_like 'rendering a single diff version'
+
it 'allows user to expand diff' do
page.within find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd"]') do
click_link 'Click to expand it.'
diff --git a/spec/features/merge_request/user_merges_merge_request_spec.rb b/spec/features/merge_request/user_merges_merge_request_spec.rb
index da15a4bda4b..32e40740a61 100644
--- a/spec/features/merge_request/user_merges_merge_request_spec.rb
+++ b/spec/features/merge_request/user_merges_merge_request_spec.rb
@@ -10,7 +10,7 @@ describe "User merges a merge request", :js do
end
shared_examples "fast forward merge a merge request" do
- it "merges a merge request" do
+ it "merges a merge request", :sidekiq_might_not_need_inline do
expect(page).to have_content("Fast-forward merge without a merge commit").and have_button("Merge")
page.within(".mr-state-widget") do
diff --git a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
index 4afbf30ece4..419f741d0ea 100644
--- a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
@@ -89,12 +89,12 @@ describe 'Merge request > User merges only if pipeline succeeds', :js do
context 'when CI skipped' do
let(:status) { :skipped }
- it 'allows MR to be merged' do
+ it 'does not allow MR to be merged' do
visit project_merge_request_path(project, merge_request)
wait_for_requests
- expect(page).to have_button 'Merge'
+ expect(page).not_to have_button 'Merge'
end
end
end
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
index ffc12ffdbaf..e40276f74e4 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
@@ -142,7 +142,7 @@ describe 'Merge request > User merges when pipeline succeeds', :js do
refresh
end
- it 'merges merge request' do
+ it 'merges merge request', :sidekiq_might_not_need_inline do
expect(page).to have_content 'The changes were merged'
expect(merge_request.reload).to be_merged
end
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index 8b16760606c..6328c0a5133 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -14,12 +14,15 @@ describe 'Merge request > User posts diff notes', :js do
let(:test_note_comment) { 'this is a test note!' }
before do
+ stub_feature_flags(single_mr_diff_view: false)
set_cookie('sidebar_collapsed', 'true')
project.add_developer(user)
sign_in(user)
end
+ it_behaves_like 'rendering a single diff version'
+
context 'when hovering over a parallel view diff file' do
before do
visit diffs_project_merge_request_path(project, merge_request, view: 'parallel')
diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb
index e3ee80a47d7..f0949fefa3b 100644
--- a/spec/features/merge_request/user_resolves_conflicts_spec.rb
+++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb
@@ -9,6 +9,7 @@ describe 'Merge request > User resolves conflicts', :js do
before do
# In order to have the diffs collapsed, we need to disable the increase feature
stub_feature_flags(gitlab_git_diff_size_limit_increase: false)
+ stub_feature_flags(single_mr_diff_view: false)
end
def create_merge_request(source_branch)
@@ -17,7 +18,9 @@ describe 'Merge request > User resolves conflicts', :js do
end
end
- shared_examples "conflicts are resolved in Interactive mode" do
+ it_behaves_like 'rendering a single diff version'
+
+ shared_examples 'conflicts are resolved in Interactive mode' do
it 'conflicts are resolved in Interactive mode' do
within find('.files-wrapper .diff-file', text: 'files/ruby/popen.rb') do
click_button 'Use ours'
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index 8b41ef86791..7cb46d90092 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -19,6 +19,12 @@ describe 'Merge request > User resolves diff notes and threads', :js do
)
end
+ before do
+ stub_feature_flags(single_mr_diff_view: false)
+ end
+
+ it_behaves_like 'rendering a single diff version'
+
context 'no threads' do
before do
project.add_maintainer(user)
diff --git a/spec/features/merge_request/user_reverts_merge_request_spec.rb b/spec/features/merge_request/user_reverts_merge_request_spec.rb
index 71270b13c14..906ff1d61b2 100644
--- a/spec/features/merge_request/user_reverts_merge_request_spec.rb
+++ b/spec/features/merge_request/user_reverts_merge_request_spec.rb
@@ -20,7 +20,7 @@ describe 'User reverts a merge request', :js do
visit(merge_request_path(merge_request))
end
- it 'reverts a merge request' do
+ it 'reverts a merge request', :sidekiq_might_not_need_inline do
find("a[href='#modal-revert-commit']").click
page.within('#modal-revert-commit') do
@@ -33,7 +33,7 @@ describe 'User reverts a merge request', :js do
wait_for_requests
end
- it 'does not revert a merge request that was previously reverted' do
+ it 'does not revert a merge request that was previously reverted', :sidekiq_might_not_need_inline do
find("a[href='#modal-revert-commit']").click
page.within('#modal-revert-commit') do
@@ -51,7 +51,7 @@ describe 'User reverts a merge request', :js do
expect(page).to have_content('Sorry, we cannot revert this merge request automatically.')
end
- it 'reverts a merge request in a new merge request' do
+ it 'reverts a merge request in a new merge request', :sidekiq_might_not_need_inline do
find("a[href='#modal-revert-commit']").click
page.within('#modal-revert-commit') do
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index baef831c40e..e882b401122 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -7,8 +7,8 @@ describe 'Merge request > User sees avatars on diff notes', :js do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
- let(:merge_request) { create(:merge_request_with_diffs, source_project: project, author: user, title: "Bug NS-04") }
- let(:path) { "files/ruby/popen.rb" }
+ let(:merge_request) { create(:merge_request_with_diffs, source_project: project, author: user, title: 'Bug NS-04') }
+ let(:path) { 'files/ruby/popen.rb' }
let(:position) do
Gitlab::Diff::Position.new(
old_path: path,
@@ -21,12 +21,15 @@ describe 'Merge request > User sees avatars on diff notes', :js do
let!(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) }
before do
+ stub_feature_flags(single_mr_diff_view: false)
project.add_maintainer(user)
sign_in user
set_cookie('sidebar_collapsed', 'true')
end
+ it_behaves_like 'rendering a single diff version'
+
context 'discussion tab' do
before do
visit project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb b/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
index 1d62f7f0702..d7675cd06a8 100644
--- a/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
+++ b/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
@@ -15,7 +15,7 @@ describe 'Merge request > User cherry-picks', :js do
context 'Viewing a merged merge request' do
before do
- service = MergeRequests::MergeService.new(project, user)
+ service = MergeRequests::MergeService.new(project, user, sha: merge_request.diff_head_sha)
perform_enqueued_jobs do
service.execute(merge_request)
diff --git a/spec/features/merge_request/user_sees_deployment_widget_spec.rb b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
index 87fb3f5b3e7..cdffd2ae2f6 100644
--- a/spec/features/merge_request/user_sees_deployment_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
@@ -11,7 +11,7 @@ describe 'Merge request > User sees deployment widget', :js do
let(:role) { :developer }
let(:ref) { merge_request.target_branch }
let(:sha) { project.commit(ref).id }
- let(:pipeline) { create(:ci_pipeline_without_jobs, sha: sha, project: project, ref: ref) }
+ let(:pipeline) { create(:ci_pipeline, sha: sha, project: project, ref: ref) }
let!(:manual) { }
before do
@@ -33,7 +33,7 @@ describe 'Merge request > User sees deployment widget', :js do
end
context 'when a user created a new merge request with the same SHA' do
- let(:pipeline2) { create(:ci_pipeline_without_jobs, sha: sha, project: project, ref: 'new-patch-1') }
+ let(:pipeline2) { create(:ci_pipeline, sha: sha, project: project, ref: 'new-patch-1') }
let(:build2) { create(:ci_build, :success, pipeline: pipeline2) }
let(:environment2) { create(:environment, project: project) }
let!(:deployment2) { create(:deployment, environment: environment2, sha: sha, ref: 'new-patch-1', deployable: build2) }
diff --git a/spec/features/merge_request/user_sees_diff_spec.rb b/spec/features/merge_request/user_sees_diff_spec.rb
index 8eeed7b0843..82dd779577c 100644
--- a/spec/features/merge_request/user_sees_diff_spec.rb
+++ b/spec/features/merge_request/user_sees_diff_spec.rb
@@ -9,6 +9,12 @@ describe 'Merge request > User sees diff', :js do
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
+ before do
+ stub_feature_flags(single_mr_diff_view: false)
+ end
+
+ it_behaves_like 'rendering a single diff version'
+
context 'when linking to note' do
describe 'with unresolved note' do
let(:note) { create :diff_note_on_merge_request, project: project, noteable: merge_request }
@@ -62,7 +68,7 @@ describe 'Merge request > User sees diff', :js do
end
context 'as author' do
- it 'shows direct edit link' do
+ it 'shows direct edit link', :sidekiq_might_not_need_inline do
sign_in(author_user)
visit diffs_project_merge_request_path(project, merge_request)
@@ -72,7 +78,7 @@ describe 'Merge request > User sees diff', :js do
end
context 'as user who needs to fork' do
- it 'shows fork/cancel confirmation' do
+ it 'shows fork/cancel confirmation', :sidekiq_might_not_need_inline do
sign_in(user)
visit diffs_project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index dd5662d83f2..abf159949db 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -67,13 +67,13 @@ describe 'Merge request > User sees pipelines triggered by merge request', :js d
end
end
- it 'sees the latest detached merge request pipeline as the head pipeline' do
+ it 'sees the latest detached merge request pipeline as the head pipeline', :sidekiq_might_not_need_inline do
page.within('.ci-widget-content') do
expect(page).to have_content("##{detached_merge_request_pipeline.id}")
end
end
- context 'when a user updated a merge request in the parent project' do
+ context 'when a user updated a merge request in the parent project', :sidekiq_might_not_need_inline do
let!(:push_pipeline_2) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
.execute(:push)
@@ -133,7 +133,7 @@ describe 'Merge request > User sees pipelines triggered by merge request', :js d
end
end
- context 'when a user merges a merge request in the parent project' do
+ context 'when a user merges a merge request in the parent project', :sidekiq_might_not_need_inline do
before do
click_button 'Merge when pipeline succeeds'
@@ -196,7 +196,7 @@ describe 'Merge request > User sees pipelines triggered by merge request', :js d
end
end
- it 'sees the latest branch pipeline as the head pipeline' do
+ it 'sees the latest branch pipeline as the head pipeline', :sidekiq_might_not_need_inline do
page.within('.ci-widget-content') do
expect(page).to have_content("##{push_pipeline.id}")
end
@@ -204,7 +204,7 @@ describe 'Merge request > User sees pipelines triggered by merge request', :js d
end
end
- context 'when a user created a merge request from a forked project to the parent project' do
+ context 'when a user created a merge request from a forked project to the parent project', :sidekiq_might_not_need_inline do
let(:merge_request) do
create(:merge_request,
source_project: forked_project,
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 6b6226ad1c5..098f41f120d 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
describe 'Merge request > User sees merge widget', :js do
include ProjectForksHelper
include TestReportsHelper
+ include ReactiveCachingHelpers
let(:project) { create(:project, :repository) }
let(:project_only_mwps) { create(:project, :repository, only_allow_merge_if_pipeline_succeeds: true) }
@@ -43,7 +44,7 @@ describe 'Merge request > User sees merge widget', :js do
context 'view merge request' do
let!(:environment) { create(:environment, project: project) }
let(:sha) { project.commit(merge_request.source_branch).sha }
- let(:pipeline) { create(:ci_pipeline_without_jobs, status: 'success', sha: sha, project: project, ref: merge_request.source_branch) }
+ let(:pipeline) { create(:ci_pipeline, status: 'success', sha: sha, project: project, ref: merge_request.source_branch) }
let(:build) { create(:ci_build, :success, pipeline: pipeline) }
let!(:deployment) do
@@ -75,7 +76,7 @@ describe 'Merge request > User sees merge widget', :js do
expect(find('.accept-merge-request')['disabled']).not_to be(true)
end
- it 'allows me to merge, see cherry-pick modal and load branches list' do
+ it 'allows me to merge, see cherry-pick modal and load branches list', :sidekiq_might_not_need_inline do
wait_for_requests
click_button 'Merge'
@@ -190,7 +191,7 @@ describe 'Merge request > User sees merge widget', :js do
end
shared_examples 'pipeline widget' do
- it 'shows head pipeline information' do
+ it 'shows head pipeline information', :sidekiq_might_not_need_inline do
within '.ci-widget-content' do
expect(page).to have_content("Detached merge request pipeline ##{pipeline.id} pending for #{pipeline.short_sha}")
end
@@ -229,7 +230,7 @@ describe 'Merge request > User sees merge widget', :js do
end
shared_examples 'pipeline widget' do
- it 'shows head pipeline information' do
+ it 'shows head pipeline information', :sidekiq_might_not_need_inline do
within '.ci-widget-content' do
expect(page).to have_content("Merged result pipeline ##{pipeline.id} pending for #{pipeline.short_sha}")
end
@@ -370,7 +371,7 @@ describe 'Merge request > User sees merge widget', :js do
visit project_merge_request_path(project, merge_request)
end
- it 'updates the MR widget' do
+ it 'updates the MR widget', :sidekiq_might_not_need_inline do
click_button 'Merge'
page.within('.mr-widget-body') do
@@ -416,7 +417,7 @@ describe 'Merge request > User sees merge widget', :js do
visit project_merge_request_path(project, merge_request)
end
- it 'user cannot remove source branch' do
+ it 'user cannot remove source branch', :sidekiq_might_not_need_inline do
expect(page).not_to have_field('remove-source-branch-input')
expect(page).to have_content('Deletes source branch')
end
@@ -435,6 +436,54 @@ describe 'Merge request > User sees merge widget', :js do
end
end
+ context 'exposed artifacts' do
+ subject { visit project_merge_request_path(project, merge_request) }
+
+ context 'when merge request has exposed artifacts' do
+ let(:merge_request) { create(:merge_request, :with_exposed_artifacts, source_project: project) }
+ let(:job) { merge_request.head_pipeline.builds.last }
+ let!(:artifacts_metadata) { create(:ci_job_artifact, :metadata, job: job) }
+
+ context 'when result has not been parsed yet' do
+ it 'shows parsing status' do
+ subject
+
+ expect(page).to have_content('Loading artifacts')
+ end
+ end
+
+ context 'when result has been parsed' do
+ before do
+ allow_any_instance_of(MergeRequest).to receive(:find_exposed_artifacts).and_return(
+ status: :parsed, data: [
+ {
+ text: "the artifact",
+ url: "/namespace1/project1/-/jobs/1/artifacts/file/ci_artifacts.txt",
+ job_path: "/namespace1/project1/-/jobs/1",
+ job_name: "test"
+ }
+ ])
+ end
+
+ it 'shows the parsed results' do
+ subject
+
+ expect(page).to have_content('View exposed artifact')
+ end
+ end
+ end
+
+ context 'when merge request does not have exposed artifacts' do
+ let(:merge_request) { create(:merge_request, source_project: project) }
+
+ it 'does not show parsing status' do
+ subject
+
+ expect(page).not_to have_content('Loading artifacts')
+ end
+ end
+ end
+
context 'when merge request has test reports' do
let!(:head_pipeline) do
create(:ci_pipeline,
@@ -696,7 +745,7 @@ describe 'Merge request > User sees merge widget', :js do
context 'when MR has pipeline but user does not have permission' do
let(:sha) { project.commit(merge_request.source_branch).sha }
- let!(:pipeline) { create(:ci_pipeline_without_jobs, status: 'success', sha: sha, project: project, ref: merge_request.source_branch) }
+ let!(:pipeline) { create(:ci_pipeline, status: 'success', sha: sha, project: project, ref: merge_request.source_branch) }
before do
project.update(
diff --git a/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb b/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb
index db0d632cdf2..3d25611e1ea 100644
--- a/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb
+++ b/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb
@@ -11,11 +11,14 @@ describe 'Merge request > User sees MR with deleted source branch', :js do
let(:user) { project.creator }
before do
+ stub_feature_flags(single_mr_diff_view: false)
merge_request.update!(source_branch: 'this-branch-does-not-exist')
sign_in(user)
visit project_merge_request_path(project, merge_request)
end
+ it_behaves_like 'rendering a single diff version'
+
it 'shows a message about missing source branch' do
expect(page).to have_content('Source branch does not exist.')
end
diff --git a/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb b/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
index 0391794649c..9c9e0dacb87 100644
--- a/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
+++ b/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
@@ -21,7 +21,7 @@ describe 'Merge request > User sees notes from forked project', :js do
sign_in(user)
end
- it 'user can reply to the comment' do
+ it 'user can reply to the comment', :sidekiq_might_not_need_inline do
visit project_merge_request_path(project, merge_request)
expect(page).to have_content('A commit comment')
diff --git a/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb b/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb
index 3e15a9c136b..d258b98f4a9 100644
--- a/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb
@@ -28,7 +28,7 @@ describe 'Merge request > User sees pipelines from forked project', :js do
visit project_merge_request_path(target_project, merge_request)
end
- it 'user visits a pipelines page' do
+ it 'user visits a pipelines page', :sidekiq_might_not_need_inline do
page.within('.merge-request-tabs') { click_link 'Pipelines' }
page.within('.ci-table') do
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 7a8b938486a..f3d8f2b42f8 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -124,7 +124,7 @@ describe 'Merge request > User sees pipelines', :js do
threads.each { |thr| thr.join }
end
- it 'user sees pipeline in merge request widget' do
+ it 'user sees pipeline in merge request widget', :sidekiq_might_not_need_inline do
visit project_merge_request_path(project, @merge_request)
expect(page.find(".ci-widget")).to have_content(TestEnv::BRANCH_SHA['feature'])
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index 62abcff7bda..c3fce9761df 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -16,11 +16,15 @@ describe 'Merge request > User sees versions', :js do
let!(:params) { {} }
before do
+ stub_feature_flags(single_mr_diff_view: false)
+
project.add_maintainer(user)
sign_in(user)
visit diffs_project_merge_request_path(project, merge_request, params)
end
+ it_behaves_like 'rendering a single diff version'
+
shared_examples 'allows commenting' do |file_id:, line_code:, comment:|
it do
diff_file_selector = ".diff-file[id='#{file_id}']"
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index 3d26ff3ed94..e2bcdfd1e2b 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -25,12 +25,15 @@ describe 'User comments on a diff', :js do
let(:user) { create(:user) }
before do
+ stub_feature_flags(single_mr_diff_view: false)
project.add_maintainer(user)
sign_in(user)
visit(diffs_project_merge_request_path(project, merge_request))
end
+ it_behaves_like 'rendering a single diff version'
+
context 'single suggestion note' do
it 'hides suggestion popover' do
click_diff_line(find("[id='#{sample_compare.changes[1][:line_code]}']"))
diff --git a/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb b/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
index 4db067a4e41..5e59bc87e68 100644
--- a/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
+++ b/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
@@ -8,6 +8,7 @@ describe 'Merge request > User toggles whitespace changes', :js do
let(:user) { project.creator }
before do
+ stub_feature_flags(single_mr_diff_view: false)
project.add_maintainer(user)
sign_in(user)
visit diffs_project_merge_request_path(project, merge_request)
@@ -15,6 +16,8 @@ describe 'Merge request > User toggles whitespace changes', :js do
find('.js-show-diff-settings').click
end
+ it_behaves_like 'rendering a single diff version'
+
it 'has a button to toggle whitespace changes' do
expect(page).to have_content 'Show whitespace changes'
end
diff --git a/spec/features/merge_request/user_views_diffs_spec.rb b/spec/features/merge_request/user_views_diffs_spec.rb
index 2d1eb260236..5a29477e597 100644
--- a/spec/features/merge_request/user_views_diffs_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_spec.rb
@@ -9,6 +9,7 @@ describe 'User views diffs', :js do
let(:project) { create(:project, :public, :repository) }
before do
+ stub_feature_flags(single_mr_diff_view: false)
visit(diffs_project_merge_request_path(project, merge_request))
wait_for_requests
@@ -16,6 +17,8 @@ describe 'User views diffs', :js do
find('.js-toggle-tree-list').click
end
+ it_behaves_like 'rendering a single diff version'
+
shared_examples 'unfold diffs' do
it 'unfolds diffs upwards' do
first('.js-unfold').click
diff --git a/spec/features/merge_requests/user_squashes_merge_request_spec.rb b/spec/features/merge_requests/user_squashes_merge_request_spec.rb
index 4fc8c71e47e..a9b96c5bbf5 100644
--- a/spec/features/merge_requests/user_squashes_merge_request_spec.rb
+++ b/spec/features/merge_requests/user_squashes_merge_request_spec.rb
@@ -10,7 +10,7 @@ describe 'User squashes a merge request', :js do
let!(:original_head) { project.repository.commit('master') }
shared_examples 'squash' do
- it 'squashes the commits into a single commit, and adds a merge commit' do
+ it 'squashes the commits into a single commit, and adds a merge commit', :sidekiq_might_not_need_inline do
expect(page).to have_content('Merged')
latest_master_commits = project.repository.commits_between(original_head.sha, 'master').map(&:raw)
@@ -31,7 +31,7 @@ describe 'User squashes a merge request', :js do
end
shared_examples 'no squash' do
- it 'accepts the merge request without squashing' do
+ it 'accepts the merge request without squashing', :sidekiq_might_not_need_inline do
expect(page).to have_content('Merged')
expect(project.repository).to be_merged_to_root_ref(source_branch)
end
@@ -47,7 +47,9 @@ describe 'User squashes a merge request', :js do
before do
# Prevent source branch from being removed so we can use be_merged_to_root_ref
# method to check if squash was performed or not
- allow_any_instance_of(MergeRequest).to receive(:force_remove_source_branch?).and_return(false)
+ allow_next_instance_of(MergeRequest) do |instance|
+ allow(instance).to receive(:force_remove_source_branch?).and_return(false)
+ end
project.add_maintainer(user)
sign_in user
diff --git a/spec/features/milestones/user_views_milestones_spec.rb b/spec/features/milestones/user_views_milestones_spec.rb
index 0b51ca12997..09378cab5e3 100644
--- a/spec/features/milestones/user_views_milestones_spec.rb
+++ b/spec/features/milestones/user_views_milestones_spec.rb
@@ -34,4 +34,31 @@ describe "User views milestones" do
.and have_content(closed_issue.title)
end
end
+
+ context "with associated releases" do
+ set(:first_release) { create(:release, project: project, name: "The first release", milestones: [milestone], released_at: Time.zone.parse('2019-10-07')) }
+
+ context "with a single associated release" do
+ it "shows the associated release" do
+ expect(page).to have_content("Release #{first_release.name}")
+ expect(page).to have_link(first_release.name, href: project_releases_path(project, anchor: first_release.tag))
+ end
+ end
+
+ context "with lots of associated releases" do
+ set(:second_release) { create(:release, project: project, name: "The second release", milestones: [milestone], released_at: first_release.released_at + 1.day) }
+ set(:third_release) { create(:release, project: project, name: "The third release", milestones: [milestone], released_at: second_release.released_at + 1.day) }
+ set(:fourth_release) { create(:release, project: project, name: "The fourth release", milestones: [milestone], released_at: third_release.released_at + 1.day) }
+ set(:fifth_release) { create(:release, project: project, name: "The fifth release", milestones: [milestone], released_at: fourth_release.released_at + 1.day) }
+
+ it "shows the associated releases and the truncation text" do
+ expect(page).to have_content("Releases #{fifth_release.name} • #{fourth_release.name} • #{third_release.name} • 2 more releases")
+
+ expect(page).to have_link(fifth_release.name, href: project_releases_path(project, anchor: fifth_release.tag))
+ expect(page).to have_link(fourth_release.name, href: project_releases_path(project, anchor: fourth_release.tag))
+ expect(page).to have_link(third_release.name, href: project_releases_path(project, anchor: third_release.tag))
+ expect(page).to have_link("2 more releases", href: project_releases_path(project))
+ end
+ end
+ end
end
diff --git a/spec/features/populate_new_pipeline_vars_with_params_spec.rb b/spec/features/populate_new_pipeline_vars_with_params_spec.rb
new file mode 100644
index 00000000000..5fe80e73e38
--- /dev/null
+++ b/spec/features/populate_new_pipeline_vars_with_params_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe "Populate new pipeline CI variables with url params", :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:page_path) { new_project_pipeline_path(project) }
+
+ before do
+ sign_in(user)
+ project.add_maintainer(user)
+
+ visit "#{page_path}?var[key1]=value1&file_var[key2]=value2"
+ end
+
+ it "var[key1]=value1 populates env_var variable correctly" do
+ page.within('.ci-variable-list .js-row:nth-child(1)') do
+ expect(find('.js-ci-variable-input-variable-type').value).to eq('env_var')
+ expect(find('.js-ci-variable-input-key').value).to eq('key1')
+ expect(find('.js-ci-variable-input-value').text).to eq('value1')
+ end
+ end
+
+ it "file_var[key2]=value2 populates file variable correctly" do
+ page.within('.ci-variable-list .js-row:nth-child(2)') do
+ expect(find('.js-ci-variable-input-variable-type').value).to eq('file')
+ expect(find('.js-ci-variable-input-key').value).to eq('key2')
+ expect(find('.js-ci-variable-input-value').text).to eq('value2')
+ end
+ end
+end
diff --git a/spec/features/profile_spec.rb b/spec/features/profile_spec.rb
index e80a3cd32cc..0147963c0a3 100644
--- a/spec/features/profile_spec.rb
+++ b/spec/features/profile_spec.rb
@@ -22,7 +22,7 @@ describe 'Profile account page', :js do
expect(User.exists?(user.id)).to be_truthy
end
- it 'deletes user', :js do
+ it 'deletes user', :js, :sidekiq_might_not_need_inline do
click_button 'Delete account'
fill_in 'password', with: '12345678'
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index 0905ab0aef8..9839b3d6c80 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -23,6 +23,7 @@ describe 'User edit profile' do
fill_in 'user_location', with: 'Ukraine'
fill_in 'user_bio', with: 'I <3 GitLab'
fill_in 'user_organization', with: 'GitLab'
+ select 'Data Analyst', from: 'user_role'
submit_settings
expect(user.reload).to have_attributes(
@@ -31,7 +32,8 @@ describe 'User edit profile' do
twitter: 'testtwitter',
website_url: 'testurl',
bio: 'I <3 GitLab',
- organization: 'GitLab'
+ organization: 'GitLab',
+ role: 'data_analyst'
)
expect(find('#user_location').value).to eq 'Ukraine'
diff --git a/spec/features/project_group_variables_spec.rb b/spec/features/project_group_variables_spec.rb
new file mode 100644
index 00000000000..c1f1c442937
--- /dev/null
+++ b/spec/features/project_group_variables_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Project group variables', :js do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:subgroup) { create(:group, parent: group) }
+ let(:subgroup_nested) { create(:group, parent: subgroup) }
+ let(:project) { create(:project, group: group) }
+ let(:project2) { create(:project, group: subgroup) }
+ let(:project3) { create(:project, group: subgroup_nested) }
+ let(:key1) { 'test_key' }
+ let(:key2) { 'test_key2' }
+ let(:key3) { 'test_key3' }
+ let!(:ci_variable) { create(:ci_group_variable, group: group, key: key1) }
+ let!(:ci_variable2) { create(:ci_group_variable, group: subgroup, key: key2) }
+ let!(:ci_variable3) { create(:ci_group_variable, group: subgroup_nested, key: key3) }
+ let(:project_path) { project_settings_ci_cd_path(project) }
+ let(:project2_path) { project_settings_ci_cd_path(project2) }
+ let(:project3_path) { project_settings_ci_cd_path(project3) }
+
+ before do
+ sign_in(user)
+ project.add_maintainer(user)
+ group.add_owner(user)
+ end
+
+ it 'project in group shows inherited vars from ancestor group' do
+ visit project_path
+ expect(page).to have_content(key1)
+ expect(page).to have_content(group.name)
+ end
+
+ it 'project in subgroup shows inherited vars from all ancestor groups' do
+ visit project2_path
+ expect(page).to have_content(key1)
+ expect(page).to have_content(key2)
+ expect(page).to have_content(group.name)
+ expect(page).to have_content(subgroup.name)
+ end
+
+ it 'project in nested subgroup shows inherited vars from all ancestor groups' do
+ visit project3_path
+ expect(page).to have_content(key1)
+ expect(page).to have_content(key2)
+ expect(page).to have_content(key3)
+ expect(page).to have_content(group.name)
+ expect(page).to have_content(subgroup.name)
+ expect(page).to have_content(subgroup_nested.name)
+ end
+
+ it 'project origin keys link to ancestor groups ci_cd settings' do
+ visit project_path
+ find('.group-origin-link').click
+ page.within('.js-ci-variable-list-section .js-row:nth-child(2)') do
+ expect(find('.js-ci-variable-input-key').value).to eq(key1)
+ end
+ end
+end
diff --git a/spec/features/projects/badges/pipeline_badge_spec.rb b/spec/features/projects/badges/pipeline_badge_spec.rb
index f2c57d702a5..af936c80886 100644
--- a/spec/features/projects/badges/pipeline_badge_spec.rb
+++ b/spec/features/projects/badges/pipeline_badge_spec.rb
@@ -22,7 +22,7 @@ describe 'Pipeline Badge' do
let!(:job) { create(:ci_build, pipeline: pipeline) }
context 'when the pipeline was successful' do
- it 'displays so on the badge' do
+ it 'displays so on the badge', :sidekiq_might_not_need_inline do
job.success
visit pipeline_project_badges_path(project, ref: ref, format: :svg)
@@ -33,7 +33,7 @@ describe 'Pipeline Badge' do
end
context 'when the pipeline failed' do
- it 'shows displays so on the badge' do
+ it 'shows displays so on the badge', :sidekiq_might_not_need_inline do
job.drop
visit pipeline_project_badges_path(project, ref: ref, format: :svg)
@@ -52,7 +52,7 @@ describe 'Pipeline Badge' do
allow(job).to receive(:prerequisites).and_return([double])
end
- it 'displays the preparing badge' do
+ it 'displays the preparing badge', :sidekiq_might_not_need_inline do
job.enqueue
visit pipeline_project_badges_path(project, ref: ref, format: :svg)
@@ -63,7 +63,7 @@ describe 'Pipeline Badge' do
end
context 'when the pipeline is running' do
- it 'shows displays so on the badge' do
+ it 'shows displays so on the badge', :sidekiq_might_not_need_inline do
create(:ci_build, pipeline: pipeline, name: 'second build', status_event: 'run')
visit pipeline_project_badges_path(project, ref: ref, format: :svg)
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 3b32d213754..0a5bc64b429 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -12,9 +12,11 @@ describe 'Editing file blob', :js do
let(:readme_file_path) { 'README.md' }
before do
- stub_feature_flags(web_ide_default: false)
+ stub_feature_flags(web_ide_default: false, single_mr_diff_view: false)
end
+ it_behaves_like 'rendering a single diff version'
+
context 'as a developer' do
let(:user) { create(:user) }
let(:role) { :developer }
@@ -27,14 +29,14 @@ describe 'Editing file blob', :js do
def edit_and_commit(commit_changes: true)
wait_for_requests
find('.js-edit-blob').click
- fill_editor(content: "class NextFeature\\nend\\n")
+ fill_editor(content: 'class NextFeature\\nend\\n')
if commit_changes
click_button 'Commit changes'
end
end
- def fill_editor(content: "class NextFeature\\nend\\n")
+ def fill_editor(content: 'class NextFeature\\nend\\n')
wait_for_requests
find('#editor')
execute_script("ace.edit('editor').setValue('#{content}')")
@@ -60,6 +62,13 @@ describe 'Editing file blob', :js do
expect(page).to have_content 'NextFeature'
end
+ it 'editing a template file in a sub directory does not change path' do
+ project.repository.create_file(user, 'ci/.gitlab-ci.yml', 'test', message: 'testing', branch_name: branch)
+ visit project_edit_blob_path(project, tree_join(branch, 'ci/.gitlab-ci.yml'))
+
+ expect(find_by_id('file_path').value).to eq('ci/.gitlab-ci.yml')
+ end
+
context 'from blob file path' do
before do
visit project_blob_path(project, tree_join(branch, file_path))
@@ -88,13 +97,13 @@ describe 'Editing file blob', :js do
context 'when rendering the preview' do
it 'renders content with CommonMark' do
visit project_edit_blob_path(project, tree_join(branch, readme_file_path))
- fill_editor(content: "1. one\\n - sublist\\n")
+ fill_editor(content: '1. one\\n - sublist\\n')
click_link 'Preview'
wait_for_requests
# the above generates two separate lists (not embedded) in CommonMark
- expect(page).to have_content("sublist")
- expect(page).not_to have_xpath("//ol//li//ul")
+ expect(page).to have_content('sublist')
+ expect(page).not_to have_xpath('//ol//li//ul')
end
end
end
diff --git a/spec/features/projects/clusters/eks_spec.rb b/spec/features/projects/clusters/eks_spec.rb
index 758dccd6e49..e0ebccd85ac 100644
--- a/spec/features/projects/clusters/eks_spec.rb
+++ b/spec/features/projects/clusters/eks_spec.rb
@@ -10,6 +10,7 @@ describe 'AWS EKS Cluster', :js do
project.add_maintainer(user)
gitlab_sign_in(user)
allow(Projects::ClustersController).to receive(:STATUS_POLLING_INTERVAL) { 100 }
+ stub_application_setting(eks_integration_enabled: true)
end
context 'when user does not have a cluster and visits cluster index page' do
@@ -27,7 +28,7 @@ describe 'AWS EKS Cluster', :js do
end
it 'user sees a form to create an EKS cluster' do
- expect(page).to have_selector(:css, '.js-create-eks-cluster')
+ expect(page).to have_content('Create new Cluster on EKS')
end
end
end
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index b5ab9faa14b..bdc946a9c98 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -67,17 +67,17 @@ describe 'Gcp Cluster', :js do
it 'user sees a cluster details page and creation status' do
subject
- expect(page).to have_content('Kubernetes cluster is being created on Google Kubernetes Engine...')
+ expect(page).to have_content('Kubernetes cluster is being created...')
Clusters::Cluster.last.provider.make_created!
- expect(page).to have_content('Kubernetes cluster was successfully created on Google Kubernetes Engine')
+ expect(page).to have_content('Kubernetes cluster was successfully created')
end
it 'user sees a error if something wrong during creation' do
subject
- expect(page).to have_content('Kubernetes cluster is being created on Google Kubernetes Engine...')
+ expect(page).to have_content('Kubernetes cluster is being created...')
Clusters::Cluster.last.provider.make_errored!('Something wrong!')
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index 84f2e3e09ae..bdaeda83926 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -13,8 +13,12 @@ describe 'User Cluster', :js do
gitlab_sign_in(user)
allow(Projects::ClustersController).to receive(:STATUS_POLLING_INTERVAL) { 100 }
- allow_any_instance_of(Clusters::Kubernetes::CreateOrUpdateNamespaceService).to receive(:execute)
- allow_any_instance_of(Clusters::Cluster).to receive(:retrieve_connection_status).and_return(:connected)
+ allow_next_instance_of(Clusters::Kubernetes::CreateOrUpdateNamespaceService) do |instance|
+ allow(instance).to receive(:execute)
+ end
+ allow_next_instance_of(Clusters::Cluster) do |instance|
+ allow(instance).to receive(:retrieve_connection_status).and_return(:connected)
+ end
end
context 'when user does not have a cluster and visits cluster index page' do
diff --git a/spec/features/projects/commit/cherry_pick_spec.rb b/spec/features/projects/commit/cherry_pick_spec.rb
index 46a6f62ba14..34b15aeaa25 100644
--- a/spec/features/projects/commit/cherry_pick_spec.rb
+++ b/spec/features/projects/commit/cherry_pick_spec.rb
@@ -55,12 +55,16 @@ describe 'Cherry-pick Commits' do
end
end
- context "I cherry-pick a commit in a new merge request" do
+ context "I cherry-pick a commit in a new merge request", :js do
it do
+ find('.header-action-buttons a.dropdown-toggle').click
find("a[href='#modal-cherry-pick-commit']").click
page.within('#modal-cherry-pick-commit') do
click_button 'Cherry-pick'
end
+
+ wait_for_requests
+
expect(page).to have_content("The commit has been successfully cherry-picked into cherry-pick-#{master_pickable_commit.short_id}. You can now submit a merge request to get this change into the original branch.")
expect(page).to have_content("From cherry-pick-#{master_pickable_commit.short_id} into master")
end
diff --git a/spec/features/projects/commits/user_browses_commits_spec.rb b/spec/features/projects/commits/user_browses_commits_spec.rb
index 131d9097f48..b22715a44f0 100644
--- a/spec/features/projects/commits/user_browses_commits_spec.rb
+++ b/spec/features/projects/commits/user_browses_commits_spec.rb
@@ -56,8 +56,6 @@ describe 'User browses commits' do
project.enable_ci
create(:ci_build, pipeline: pipeline)
-
- allow_any_instance_of(Ci::Pipeline).to receive(:ci_yaml_file).and_return('')
end
it 'renders commit ci info' do
@@ -94,8 +92,12 @@ describe 'User browses commits' do
let(:commit) { create(:commit, project: project) }
it 'renders successfully' do
- allow_any_instance_of(Gitlab::Diff::File).to receive(:blob).and_return(nil)
- allow_any_instance_of(Gitlab::Diff::File).to receive(:binary?).and_return(true)
+ allow_next_instance_of(Gitlab::Diff::File) do |instance|
+ allow(instance).to receive(:blob).and_return(nil)
+ end
+ allow_next_instance_of(Gitlab::Diff::File) do |instance|
+ allow(instance).to receive(:binary?).and_return(true)
+ end
visit(project_commit_path(project, commit))
diff --git a/spec/features/projects/compare_spec.rb b/spec/features/projects/compare_spec.rb
index 34bde29c8da..df5cec80ae4 100644
--- a/spec/features/projects/compare_spec.rb
+++ b/spec/features/projects/compare_spec.rb
@@ -107,7 +107,9 @@ describe "Compare", :js do
visit project_compare_index_path(project, from: "feature", to: "master")
allow(Commit).to receive(:max_diff_options).and_return(max_files: 3)
- allow_any_instance_of(DiffHelper).to receive(:render_overflow_warning?).and_return(true)
+ allow_next_instance_of(DiffHelper) do |instance|
+ allow(instance).to receive(:render_overflow_warning?).and_return(true)
+ end
click_button('Compare')
@@ -136,7 +138,7 @@ describe "Compare", :js do
def select_using_dropdown(dropdown_type, selection, commit: false)
dropdown = find(".js-compare-#{dropdown_type}-dropdown")
dropdown.find(".compare-dropdown-toggle").click
- # find input before using to wait for the inputs visiblity
+ # find input before using to wait for the inputs visibility
dropdown.find('.dropdown-menu')
dropdown.fill_in("Filter by Git revision", with: selection)
wait_for_requests
@@ -144,7 +146,7 @@ describe "Compare", :js do
if commit
dropdown.find('input[type="search"]').send_keys(:return)
else
- # find before all to wait for the items visiblity
+ # find before all to wait for the items visibility
dropdown.find("a[data-ref=\"#{selection}\"]", match: :first)
dropdown.all("a[data-ref=\"#{selection}\"]").last.click
end
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index dd690699ff6..3eab13cb820 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -175,8 +175,9 @@ describe 'Environment' do
#
# In EE we have to stub EE::Environment since it overwrites
# the "terminals" method.
- allow_any_instance_of(Gitlab.ee? ? EE::Environment : Environment)
- .to receive(:terminals) { nil }
+ allow_next_instance_of(Gitlab.ee? ? EE::Environment : Environment) do |instance|
+ allow(instance).to receive(:terminals) { nil }
+ end
visit terminal_project_environment_path(project, environment)
end
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 1a2302b3d0c..74c2758c30f 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -71,7 +71,9 @@ describe 'Environments page', :js do
let!(:application_prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
before do
- allow_any_instance_of(Kubeclient::Client).to receive(:proxy_url).and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil))
+ allow_next_instance_of(Kubeclient::Client) do |instance|
+ allow(instance).to receive(:proxy_url).and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil))
+ end
end
it 'shows one environment without error' do
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index 9ec61743a11..5553e496e7a 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -42,7 +42,9 @@ describe 'Edit Project Settings' do
context 'When external issue tracker is enabled and issues enabled on project settings' do
it 'does not hide issues tab' do
- allow_any_instance_of(Project).to receive(:external_issue_tracker).and_return(JiraService.new)
+ allow_next_instance_of(Project) do |instance|
+ allow(instance).to receive(:external_issue_tracker).and_return(JiraService.new)
+ end
visit project_path(project)
@@ -54,7 +56,9 @@ describe 'Edit Project Settings' do
it 'hides issues tab' do
project.issues_enabled = false
project.save!
- allow_any_instance_of(Project).to receive(:external_issue_tracker).and_return(JiraService.new)
+ allow_next_instance_of(Project) do |instance|
+ allow(instance).to receive(:external_issue_tracker).and_return(JiraService.new)
+ end
visit project_path(project)
diff --git a/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb b/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb
index 0e43f2fd26b..622764487d8 100644
--- a/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb
+++ b/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb
@@ -7,13 +7,11 @@ describe 'Projects > Files > User views files page' do
let(:user) { project.owner }
before do
- stub_feature_flags(vue_file_list: false)
-
sign_in user
visit project_tree_path(project, project.repository.root_ref)
end
- it 'user sees folders and submodules sorted together, followed by files' do
+ it 'user sees folders and submodules sorted together, followed by files', :js do
rows = all('td.tree-item-file-name').map(&:text)
tree = project.repository.tree
diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
index 943c6e0e959..9fccb3441d6 100644
--- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb
+++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
@@ -7,7 +7,6 @@ describe 'Projects > Files > Project owner creates a license file', :js do
let(:project_maintainer) { project.owner }
before do
- stub_feature_flags(vue_file_list: false)
project.repository.delete_file(project_maintainer, 'LICENSE',
message: 'Remove LICENSE', branch_name: 'master')
sign_in(project_maintainer)
@@ -39,7 +38,7 @@ describe 'Projects > Files > Project owner creates a license file', :js do
end
it 'project maintainer creates a license file from the "Add license" link' do
- click_link 'Add license'
+ click_link 'Add LICENSE'
expect(page).to have_content('New file')
expect(current_path).to eq(
diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
index 9f63b312146..ad6c565c8f9 100644
--- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
+++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
@@ -12,7 +12,7 @@ describe 'Projects > Files > Project owner sees a link to create a license file
it 'project maintainer creates a license file from a template' do
visit project_path(project)
- click_on 'Add license'
+ click_on 'Add LICENSE'
expect(page).to have_content('New file')
expect(current_path).to eq(
diff --git a/spec/features/projects/files/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb
index 0b3f905b5de..10672bbec68 100644
--- a/spec/features/projects/files/user_browses_files_spec.rb
+++ b/spec/features/projects/files/user_browses_files_spec.rb
@@ -13,23 +13,22 @@ describe "User browses files" do
let(:user) { project.owner }
before do
- stub_feature_flags(vue_file_list: false)
sign_in(user)
end
- it "shows last commit for current directory" do
+ it "shows last commit for current directory", :js do
visit(tree_path_root_ref)
click_link("files")
last_commit = project.repository.last_commit_for_path(project.default_branch, "files")
- page.within(".blob-commit-info") do
+ page.within(".commit-detail") do
expect(page).to have_content(last_commit.short_id).and have_content(last_commit.author_name)
end
end
- context "when browsing the master branch" do
+ context "when browsing the master branch", :js do
before do
visit(tree_path_root_ref)
end
@@ -124,8 +123,7 @@ describe "User browses files" do
expect(current_path).to eq(project_tree_path(project, "markdown/doc/raketasks"))
expect(page).to have_content("backup_restore.md").and have_content("maintenance.md")
- click_link("shop")
- click_link("Maintenance")
+ click_link("maintenance.md")
expect(current_path).to eq(project_blob_path(project, "markdown/doc/raketasks/maintenance.md"))
expect(page).to have_content("bundle exec rake gitlab:env:info RAILS_ENV=production")
@@ -144,7 +142,7 @@ describe "User browses files" do
# rubocop:disable Lint/Void
# Test the full URLs of links instead of relative paths by `have_link(text: "...", href: "...")`.
- find("a", text: /^empty$/)["href"] == project_tree_url(project, "markdown/d")
+ find("a", text: "..")["href"] == project_tree_url(project, "markdown/d")
# rubocop:enable Lint/Void
page.within(".tree-table") do
@@ -168,7 +166,7 @@ describe "User browses files" do
end
end
- context "when browsing a specific ref" do
+ context "when browsing a specific ref", :js do
let(:ref) { project_tree_path(project, "6d39438") }
before do
@@ -180,7 +178,7 @@ describe "User browses files" do
expect(page).to have_content(".gitignore").and have_content("LICENSE")
end
- it "shows files from a repository with apostrophe in its name", :js do
+ it "shows files from a repository with apostrophe in its name" do
first(".js-project-refs-dropdown").click
page.within(".project-refs-form") do
@@ -191,10 +189,10 @@ describe "User browses files" do
visit(project_tree_path(project, "'test'"))
- expect(page).to have_css(".tree-commit-link").and have_no_content("Loading commit data...")
+ expect(page).not_to have_selector(".tree-commit .animation-container")
end
- it "shows the code with a leading dot in the directory", :js do
+ it "shows the code with a leading dot in the directory" do
first(".js-project-refs-dropdown").click
page.within(".project-refs-form") do
@@ -203,7 +201,7 @@ describe "User browses files" do
visit(project_tree_path(project, "fix/.testdir"))
- expect(page).to have_css(".tree-commit-link").and have_no_content("Loading commit data...")
+ expect(page).not_to have_selector(".tree-commit .animation-container")
end
it "does not show the permalink link" do
@@ -221,7 +219,7 @@ describe "User browses files" do
click_link(".gitignore")
end
- it "shows a file content", :js do
+ it "shows a file content" do
expect(page).to have_content("*.rbc")
end
diff --git a/spec/features/projects/files/user_browses_lfs_files_spec.rb b/spec/features/projects/files/user_browses_lfs_files_spec.rb
index 08ebeed2cdd..618290416bd 100644
--- a/spec/features/projects/files/user_browses_lfs_files_spec.rb
+++ b/spec/features/projects/files/user_browses_lfs_files_spec.rb
@@ -7,8 +7,6 @@ describe 'Projects > Files > User browses LFS files' do
let(:user) { project.owner }
before do
- stub_feature_flags(vue_file_list: false)
-
sign_in(user)
end
diff --git a/spec/features/projects/files/user_creates_directory_spec.rb b/spec/features/projects/files/user_creates_directory_spec.rb
index 19d95c87c6c..b8765066217 100644
--- a/spec/features/projects/files/user_creates_directory_spec.rb
+++ b/spec/features/projects/files/user_creates_directory_spec.rb
@@ -13,8 +13,6 @@ describe 'Projects > Files > User creates a directory', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(vue_file_list: false)
-
project.add_developer(user)
sign_in(user)
visit project_tree_path(project, 'master')
@@ -71,7 +69,7 @@ describe 'Projects > Files > User creates a directory', :js do
visit(project2_tree_path_root_ref)
end
- it 'creates a directory in a forked project' do
+ it 'creates a directory in a forked project', :sidekiq_might_not_need_inline do
find('.add-to-tree').click
click_link('New directory')
diff --git a/spec/features/projects/files/user_creates_files_spec.rb b/spec/features/projects/files/user_creates_files_spec.rb
index 74c037641cd..eb9a4d8cb09 100644
--- a/spec/features/projects/files/user_creates_files_spec.rb
+++ b/spec/features/projects/files/user_creates_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Projects > Files > User creates files' do
+describe 'Projects > Files > User creates files', :js do
let(:fork_message) do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
@@ -14,7 +14,6 @@ describe 'Projects > Files > User creates files' do
let(:user) { create(:user) }
before do
- stub_feature_flags(vue_file_list: false)
stub_feature_flags(web_ide_default: false)
project.add_maintainer(user)
@@ -42,7 +41,7 @@ describe 'Projects > Files > User creates files' do
visit(project2_tree_path_root_ref)
end
- it 'opens new file page on a forked project' do
+ it 'opens new file page on a forked project', :sidekiq_might_not_need_inline do
find('.add-to-tree').click
click_link('New file')
@@ -68,8 +67,7 @@ describe 'Projects > Files > User creates files' do
file_name = find('#file_name')
file_name.set options[:file_name] || 'README.md'
- file_content = find('#file-content', visible: false)
- file_content.set options[:file_content] || 'Some content'
+ find('.ace_text-input', visible: false).send_keys.native.send_keys options[:file_content] || 'Some content'
click_button 'Commit changes'
end
@@ -89,7 +87,7 @@ describe 'Projects > Files > User creates files' do
expect(page).to have_content 'Path cannot include directory traversal'
end
- it 'creates and commit a new file', :js do
+ it 'creates and commit a new file' do
find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:file_name, with: 'not_a_file.md')
@@ -105,7 +103,7 @@ describe 'Projects > Files > User creates files' do
expect(page).to have_content('*.rbca')
end
- it 'creates and commit a new file with new lines at the end of file', :js do
+ it 'creates and commit a new file with new lines at the end of file' do
find('#editor')
execute_script('ace.edit("editor").setValue("Sample\n\n\n")')
fill_in(:file_name, with: 'not_a_file.md')
@@ -122,7 +120,7 @@ describe 'Projects > Files > User creates files' do
expect(evaluate_script('ace.edit("editor").getValue()')).to eq("Sample\n\n\n")
end
- it 'creates and commit a new file with a directory name', :js do
+ it 'creates and commit a new file with a directory name' do
fill_in(:file_name, with: 'foo/bar/baz.txt')
expect(page).to have_selector('.file-editor')
@@ -139,7 +137,7 @@ describe 'Projects > Files > User creates files' do
expect(page).to have_content('*.rbca')
end
- it 'creates and commit a new file specifying a new branch', :js do
+ it 'creates and commit a new file specifying a new branch' do
expect(page).to have_selector('.file-editor')
find('#editor')
@@ -159,7 +157,7 @@ describe 'Projects > Files > User creates files' do
end
end
- context 'when a user does not have write access' do
+ context 'when a user does not have write access', :sidekiq_might_not_need_inline do
before do
project2.add_reporter(user)
visit(project2_tree_path_root_ref)
@@ -174,7 +172,7 @@ describe 'Projects > Files > User creates files' do
expect(page).to have_content(message)
end
- it 'creates and commit new file in forked project', :js do
+ it 'creates and commit new file in forked project' do
expect(page).to have_selector('.file-editor')
find('#editor')
diff --git a/spec/features/projects/files/user_deletes_files_spec.rb b/spec/features/projects/files/user_deletes_files_spec.rb
index fd4783cfb6b..0f543e47631 100644
--- a/spec/features/projects/files/user_deletes_files_spec.rb
+++ b/spec/features/projects/files/user_deletes_files_spec.rb
@@ -14,8 +14,6 @@ describe 'Projects > Files > User deletes files', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(vue_file_list: false)
-
sign_in(user)
end
@@ -47,7 +45,7 @@ describe 'Projects > Files > User deletes files', :js do
wait_for_requests
end
- it 'deletes the file in a forked project', :js do
+ it 'deletes the file in a forked project', :js, :sidekiq_might_not_need_inline do
click_link('.gitignore')
expect(page).to have_content('.gitignore')
diff --git a/spec/features/projects/files/user_edits_files_spec.rb b/spec/features/projects/files/user_edits_files_spec.rb
index 56430721ed6..374a7fb7936 100644
--- a/spec/features/projects/files/user_edits_files_spec.rb
+++ b/spec/features/projects/files/user_edits_files_spec.rb
@@ -12,7 +12,6 @@ describe 'Projects > Files > User edits files', :js do
before do
stub_feature_flags(web_ide_default: false)
- stub_feature_flags(vue_file_list: false)
sign_in(user)
end
@@ -136,7 +135,7 @@ describe 'Projects > Files > User edits files', :js do
)
end
- it 'inserts a content of a file in a forked project' do
+ it 'inserts a content of a file in a forked project', :sidekiq_might_not_need_inline do
click_link('.gitignore')
click_button('Edit')
@@ -154,7 +153,7 @@ describe 'Projects > Files > User edits files', :js do
expect(evaluate_script('ace.edit("editor").getValue()')).to eq('*.rbca')
end
- it 'opens the Web IDE in a forked project' do
+ it 'opens the Web IDE in a forked project', :sidekiq_might_not_need_inline do
click_link('.gitignore')
click_button('Web IDE')
@@ -168,7 +167,7 @@ describe 'Projects > Files > User edits files', :js do
expect(page).to have_css('.ide .multi-file-tab', text: '.gitignore')
end
- it 'commits an edited file in a forked project' do
+ it 'commits an edited file in a forked project', :sidekiq_might_not_need_inline do
click_link('.gitignore')
find('.js-edit-blob').click
@@ -199,7 +198,7 @@ describe 'Projects > Files > User edits files', :js do
wait_for_requests
end
- it 'links to the forked project for editing' do
+ it 'links to the forked project for editing', :sidekiq_might_not_need_inline do
click_link('.gitignore')
find('.js-edit-blob').click
diff --git a/spec/features/projects/files/user_reads_pipeline_status_spec.rb b/spec/features/projects/files/user_reads_pipeline_status_spec.rb
index 15f8fa7438d..9d38c44b6ef 100644
--- a/spec/features/projects/files/user_reads_pipeline_status_spec.rb
+++ b/spec/features/projects/files/user_reads_pipeline_status_spec.rb
@@ -9,8 +9,6 @@ describe 'user reads pipeline status', :js do
let(:x110_pipeline) { create_pipeline('x1.1.0', 'failed') }
before do
- stub_feature_flags(vue_file_list: false)
-
project.add_maintainer(user)
project.repository.add_tag(user, 'x1.1.0', 'v1.1.0')
@@ -25,7 +23,7 @@ describe 'user reads pipeline status', :js do
visit project_tree_path(project, expected_pipeline.ref)
wait_for_requests
- page.within('.blob-commit-info') do
+ page.within('.commit-detail') do
expect(page).to have_link('', href: project_pipeline_path(project, expected_pipeline))
expect(page).to have_selector(".ci-status-icon-#{expected_pipeline.status}")
end
diff --git a/spec/features/projects/files/user_replaces_files_spec.rb b/spec/features/projects/files/user_replaces_files_spec.rb
index d50bc0a7d18..4c54bbdcd67 100644
--- a/spec/features/projects/files/user_replaces_files_spec.rb
+++ b/spec/features/projects/files/user_replaces_files_spec.rb
@@ -16,8 +16,6 @@ describe 'Projects > Files > User replaces files', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(vue_file_list: false)
-
sign_in(user)
end
@@ -55,7 +53,7 @@ describe 'Projects > Files > User replaces files', :js do
wait_for_requests
end
- it 'replaces an existing file with a new one in a forked project' do
+ it 'replaces an existing file with a new one in a forked project', :sidekiq_might_not_need_inline do
click_link('.gitignore')
expect(page).to have_content('.gitignore')
diff --git a/spec/features/projects/files/user_uploads_files_spec.rb b/spec/features/projects/files/user_uploads_files_spec.rb
index 74b5d7c5041..35a3835ff12 100644
--- a/spec/features/projects/files/user_uploads_files_spec.rb
+++ b/spec/features/projects/files/user_uploads_files_spec.rb
@@ -16,8 +16,6 @@ describe 'Projects > Files > User uploads files' do
let(:project2_tree_path_root_ref) { project_tree_path(project2, project2.repository.root_ref) }
before do
- stub_feature_flags(vue_file_list: false)
-
project.add_maintainer(user)
sign_in(user)
end
@@ -76,7 +74,7 @@ describe 'Projects > Files > User uploads files' do
visit(project2_tree_path_root_ref)
end
- it 'uploads and commit a new file to a forked project', :js do
+ it 'uploads and commit a new file to a forked project', :js, :sidekiq_might_not_need_inline do
find('.add-to-tree').click
click_link('Upload file')
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index 6792a6e2af0..0f97032eefa 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -27,7 +27,7 @@ describe 'Project fork' do
expect(page).to have_css('a.disabled', text: 'Fork')
end
- it 'forks the project' do
+ it 'forks the project', :sidekiq_might_not_need_inline do
visit project_path(project)
click_link 'Fork'
@@ -174,7 +174,7 @@ describe 'Project fork' do
expect(page).to have_css('.fork-thumbnail.disabled')
end
- it 'links to the fork if the project was already forked within that namespace' do
+ it 'links to the fork if the project was already forked within that namespace', :sidekiq_might_not_need_inline do
forked_project = fork_project(project, user, namespace: group, repository: true)
visit new_project_fork_path(project)
diff --git a/spec/features/projects/forks/fork_list_spec.rb b/spec/features/projects/forks/fork_list_spec.rb
index 2dbe3d90bad..3b63d9a4c2d 100644
--- a/spec/features/projects/forks/fork_list_spec.rb
+++ b/spec/features/projects/forks/fork_list_spec.rb
@@ -15,7 +15,7 @@ describe 'listing forks of a project' do
sign_in(user)
end
- it 'shows the forked project in the list with commit as description' do
+ it 'shows the forked project in the list with commit as description', :sidekiq_might_not_need_inline do
visit project_forks_path(source)
page.within('li.project-row') do
diff --git a/spec/features/projects/graph_spec.rb b/spec/features/projects/graph_spec.rb
index 6082eb03374..5dabaf20952 100644
--- a/spec/features/projects/graph_spec.rb
+++ b/spec/features/projects/graph_spec.rb
@@ -29,12 +29,6 @@ describe 'Project Graph', :js do
end
end
- it 'renders graphs' do
- visit project_graph_path(project, 'master')
-
- expect(page).to have_selector('.stat-graph', visible: false)
- end
-
context 'commits graph' do
before do
visit commits_project_graph_path(project, 'master')
diff --git a/spec/features/projects/import_export/export_file_spec.rb b/spec/features/projects/import_export/export_file_spec.rb
index 7618a2bdea3..c15a3250221 100644
--- a/spec/features/projects/import_export/export_file_spec.rb
+++ b/spec/features/projects/import_export/export_file_spec.rb
@@ -26,7 +26,9 @@ describe 'Import/Export - project export integration test', :js do
let(:project) { setup_project }
before do
- allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ allow_next_instance_of(Gitlab::ImportExport) do |instance|
+ allow(instance).to receive(:storage_path).and_return(export_path)
+ end
end
after do
@@ -38,7 +40,7 @@ describe 'Import/Export - project export integration test', :js do
sign_in(user)
end
- it 'exports a project successfully' do
+ it 'exports a project successfully', :sidekiq_might_not_need_inline do
visit edit_project_path(project)
expect(page).to have_content('Export project')
diff --git a/spec/features/projects/import_export/import_file_spec.rb b/spec/features/projects/import_export/import_file_spec.rb
index 6f96da60a31..33c7182c084 100644
--- a/spec/features/projects/import_export/import_file_spec.rb
+++ b/spec/features/projects/import_export/import_file_spec.rb
@@ -11,7 +11,9 @@ describe 'Import/Export - project import integration test', :js do
before do
stub_uploads_object_storage(FileUploader)
- allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ allow_next_instance_of(Gitlab::ImportExport) do |instance|
+ allow(instance).to receive(:storage_path).and_return(export_path)
+ end
gitlab_sign_in(user)
end
@@ -27,7 +29,7 @@ describe 'Import/Export - project import integration test', :js do
let(:project_path) { 'test-project-name' + randomHex }
context 'prefilled the path' do
- it 'user imports an exported project successfully' do
+ it 'user imports an exported project successfully', :sidekiq_might_not_need_inline do
visit new_project_path
fill_in :project_name, with: project_name, visible: true
@@ -53,7 +55,7 @@ describe 'Import/Export - project import integration test', :js do
end
context 'path is not prefilled' do
- it 'user imports an exported project successfully' do
+ it 'user imports an exported project successfully', :sidekiq_might_not_need_inline do
visit new_project_path
click_import_project_tab
click_link 'GitLab export'
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index f5d5bc7f5b9..c9568dbb7ce 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -166,7 +166,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
let(:source_project) { fork_project(project, user, repository: true) }
let(:target_project) { project }
- it 'shows merge request iid and source branch' do
+ it 'shows merge request iid and source branch', :sidekiq_might_not_need_inline do
visit project_job_path(source_project, job)
within '.js-pipeline-info' do
@@ -214,7 +214,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
let(:source_project) { fork_project(project, user, repository: true) }
let(:target_project) { project }
- it 'shows merge request iid and source branch' do
+ it 'shows merge request iid and source branch', :sidekiq_might_not_need_inline do
visit project_job_path(source_project, job)
within '.js-pipeline-info' do
diff --git a/spec/features/projects/labels/search_labels_spec.rb b/spec/features/projects/labels/search_labels_spec.rb
index 2d5a138c3cc..e2eec7400ff 100644
--- a/spec/features/projects/labels/search_labels_spec.rb
+++ b/spec/features/projects/labels/search_labels_spec.rb
@@ -68,7 +68,7 @@ describe 'Search for labels', :js do
find('#label-search').native.send_keys(:enter)
page.within('.prioritized-labels') do
- expect(page).to have_content('No prioritised labels with such name or description')
+ expect(page).to have_content('No prioritized labels with such name or description')
end
page.within('.other-labels') do
diff --git a/spec/features/projects/members/member_leaves_project_spec.rb b/spec/features/projects/members/member_leaves_project_spec.rb
index fb1165838c7..cb7a405e821 100644
--- a/spec/features/projects/members/member_leaves_project_spec.rb
+++ b/spec/features/projects/members/member_leaves_project_spec.rb
@@ -20,7 +20,7 @@ describe 'Projects > Members > Member leaves project' do
expect(project.users.exists?(user.id)).to be_falsey
end
- it 'user leaves project by url param', :js do
+ it 'user leaves project by url param', :js, :quarantine do
visit project_path(project, leave: 1)
page.accept_confirm
diff --git a/spec/features/projects/members/user_requests_access_spec.rb b/spec/features/projects/members/user_requests_access_spec.rb
index a77f0bdcbe9..7e7faca9741 100644
--- a/spec/features/projects/members/user_requests_access_spec.rb
+++ b/spec/features/projects/members/user_requests_access_spec.rb
@@ -26,7 +26,6 @@ describe 'Projects > Members > User requests access', :js do
expect(ActionMailer::Base.deliveries.last.subject).to eq "Request to join the #{project.full_name} project"
expect(project.requesters.exists?(user_id: user)).to be_truthy
- expect(page).to have_content 'Your request for access has been queued for review.'
expect(page).to have_content 'Withdraw Access Request'
expect(page).not_to have_content 'Leave Project'
@@ -64,7 +63,6 @@ describe 'Projects > Members > User requests access', :js do
accept_confirm { click_link 'Withdraw Access Request' }
- expect(page).to have_content 'Your access request to the project has been withdrawn.'
expect(page).not_to have_content 'Withdraw Access Request'
expect(page).to have_content 'Request Access'
end
diff --git a/spec/features/projects/milestones/milestone_spec.rb b/spec/features/projects/milestones/milestone_spec.rb
index 5e94b2f721e..fb9667cd67d 100644
--- a/spec/features/projects/milestones/milestone_spec.rb
+++ b/spec/features/projects/milestones/milestone_spec.rb
@@ -7,6 +7,18 @@ describe 'Project milestone' do
let(:project) { create(:project, name: 'test', namespace: user.namespace) }
let(:milestone) { create(:milestone, project: project) }
+ def toggle_sidebar
+ find('.milestone-sidebar .gutter-toggle').click
+ end
+
+ def sidebar_release_block
+ find('.milestone-sidebar .block.releases')
+ end
+
+ def sidebar_release_block_collapsed_icon
+ find('.milestone-sidebar .block.releases .sidebar-collapsed-icon')
+ end
+
before do
sign_in(user)
end
@@ -39,15 +51,16 @@ describe 'Project milestone' do
context 'when project has disabled issues' do
before do
+ create(:issue, project: project, milestone: milestone)
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
+
visit project_milestone_path(project, milestone)
end
- it 'hides issues tab' do
+ it 'does not show any issues under the issues tab' do
within('#content-body') do
- expect(page).not_to have_link 'Issues', href: '#tab-issues'
- expect(page).to have_selector '.nav-links li a.active', count: 1
- expect(find('.nav-links li a.active')).to have_content 'Merge Requests'
+ expect(find('.nav-links li a.active')).to have_content 'Issues'
+ expect(page).not_to have_selector '.issuable-row'
end
end
@@ -75,17 +88,96 @@ describe 'Project milestone' do
describe 'the collapsed sidebar' do
before do
- find('.milestone-sidebar .gutter-toggle').click
+ toggle_sidebar
end
it 'shows the total MR and issue counts' do
find('.milestone-sidebar .block', match: :first)
aggregate_failures 'MR and issue blocks' do
- expect(find('.milestone-sidebar .block.issues')).to have_content 1
- expect(find('.milestone-sidebar .block.merge-requests')).to have_content 0
+ expect(find('.milestone-sidebar .block.issues')).to have_content '1'
+ expect(find('.milestone-sidebar .block.merge-requests')).to have_content '0'
end
end
end
end
+
+ context 'when the milestone is not associated with a release' do
+ before do
+ visit project_milestone_path(project, milestone)
+ end
+
+ it 'shows "None" in the "Releases" section' do
+ expect(sidebar_release_block).to have_content 'Releases None'
+ end
+
+ describe 'when the sidebar is collapsed' do
+ before do
+ toggle_sidebar
+ end
+
+ it 'shows "0" in the "Releases" section' do
+ expect(sidebar_release_block).to have_content '0'
+ end
+
+ it 'has a tooltip that reads "Releases"' do
+ expect(sidebar_release_block_collapsed_icon['title']).to eq 'Releases'
+ end
+ end
+ end
+
+ context 'when the milestone is associated with one release' do
+ before do
+ create(:release, project: project, name: 'Version 5', milestones: [milestone])
+
+ visit project_milestone_path(project, milestone)
+ end
+
+ it 'shows "Version 5" in the "Release" section' do
+ expect(sidebar_release_block).to have_content 'Release Version 5'
+ end
+
+ describe 'when the sidebar is collapsed' do
+ before do
+ toggle_sidebar
+ end
+
+ it 'shows "1" in the "Releases" section' do
+ expect(sidebar_release_block).to have_content '1'
+ end
+
+ it 'has a tooltip that reads "1 release"' do
+ expect(sidebar_release_block_collapsed_icon['title']).to eq '1 release'
+ end
+ end
+ end
+
+ context 'when the milestone is associated with multiple releases' do
+ before do
+ (5..10).each do |num|
+ released_at = Time.zone.parse('2019-10-04') + num.months
+ create(:release, project: project, name: "Version #{num}", milestones: [milestone], released_at: released_at)
+ end
+
+ visit project_milestone_path(project, milestone)
+ end
+
+ it 'shows a shortened list of releases in the "Releases" section' do
+ expect(sidebar_release_block).to have_content 'Releases Version 10 • Version 9 • Version 8 • 3 more releases'
+ end
+
+ describe 'when the sidebar is collapsed' do
+ before do
+ toggle_sidebar
+ end
+
+ it 'shows "6" in the "Releases" section' do
+ expect(sidebar_release_block).to have_content '6'
+ end
+
+ it 'has a tooltip that reads "6 releases"' do
+ expect(sidebar_release_block_collapsed_icon['title']).to eq '6 releases'
+ end
+ end
+ end
end
diff --git a/spec/features/projects/pages_lets_encrypt_spec.rb b/spec/features/projects/pages_lets_encrypt_spec.rb
index 8b5964b2eee..d09014e915d 100644
--- a/spec/features/projects/pages_lets_encrypt_spec.rb
+++ b/spec/features/projects/pages_lets_encrypt_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe "Pages with Let's Encrypt", :https_pages_enabled do
include LetsEncryptHelpers
- let(:project) { create(:project) }
+ let(:project) { create(:project, pages_https_only: false) }
let(:user) { create(:user) }
let(:role) { :maintainer }
let(:certificate_pem) { attributes_for(:pages_domain)[:certificate] }
@@ -18,7 +18,21 @@ describe "Pages with Let's Encrypt", :https_pages_enabled do
project.add_role(user, role)
sign_in(user)
project.namespace.update(owner: user)
- allow_any_instance_of(Project).to receive(:pages_deployed?) { true }
+ allow_next_instance_of(Project) do |instance|
+ allow(instance).to receive(:pages_deployed?) { true }
+ end
+ end
+
+ it "creates new domain with Let's Encrypt enabled by default" do
+ visit new_project_pages_domain_path(project)
+
+ fill_in 'Domain', with: 'my.test.domain.com'
+
+ expect(find("#pages_domain_auto_ssl_enabled", visible: false).value).to eq 'true'
+ click_button 'Create New Domain'
+
+ expect(page).to have_content('my.test.domain.com')
+ expect(PagesDomain.find_by_domain('my.test.domain.com').auto_ssl_enabled).to eq(true)
end
context 'when the auto SSL management is initially disabled' do
@@ -32,14 +46,14 @@ describe "Pages with Let's Encrypt", :https_pages_enabled do
expect(domain.auto_ssl_enabled).to eq false
expect(find("#pages_domain_auto_ssl_enabled", visible: false).value).to eq 'false'
- expect(page).to have_field 'Certificate (PEM)', type: 'textarea'
- expect(page).to have_field 'Key (PEM)', type: 'textarea'
+ expect(page).to have_selector '.card-header', text: 'Certificate'
+ expect(page).to have_text domain.subject
find('.js-auto-ssl-toggle-container .project-feature-toggle').click
expect(find("#pages_domain_auto_ssl_enabled", visible: false).value).to eq 'true'
- expect(page).not_to have_field 'Certificate (PEM)', type: 'textarea'
- expect(page).not_to have_field 'Key (PEM)', type: 'textarea'
+ expect(page).not_to have_selector '.card-header', text: 'Certificate'
+ expect(page).not_to have_text domain.subject
click_on 'Save Changes'
@@ -65,9 +79,6 @@ describe "Pages with Let's Encrypt", :https_pages_enabled do
expect(page).to have_field 'Certificate (PEM)', type: 'textarea'
expect(page).to have_field 'Key (PEM)', type: 'textarea'
- fill_in 'Certificate (PEM)', with: certificate_pem
- fill_in 'Key (PEM)', with: certificate_key
-
click_on 'Save Changes'
expect(domain.reload.auto_ssl_enabled).to eq false
@@ -79,7 +90,8 @@ describe "Pages with Let's Encrypt", :https_pages_enabled do
it 'user do not see private key' do
visit edit_project_pages_domain_path(project, domain)
- expect(find_field('Key (PEM)', visible: :all, disabled: :all).value).to be_blank
+ expect(page).not_to have_selector '.card-header', text: 'Certificate'
+ expect(page).not_to have_text domain.subject
end
end
@@ -96,12 +108,23 @@ describe "Pages with Let's Encrypt", :https_pages_enabled do
end
context 'when certificate is provided by user' do
- let(:domain) { create(:pages_domain, project: project) }
+ let(:domain) { create(:pages_domain, project: project, auto_ssl_enabled: false) }
+
+ it 'user sees certificate subject' do
+ visit edit_project_pages_domain_path(project, domain)
+
+ expect(page).to have_selector '.card-header', text: 'Certificate'
+ expect(page).to have_text domain.subject
+ end
- it 'user sees private key' do
+ it 'user can delete the certificate', :js do
visit edit_project_pages_domain_path(project, domain)
- expect(find_field('Key (PEM)').value).not_to be_blank
+ expect(page).to have_selector '.card-header', text: 'Certificate'
+ expect(page).to have_text domain.subject
+ within('.card') { accept_confirm { click_on 'Remove' } }
+ expect(page).to have_field 'Certificate (PEM)', with: ''
+ expect(page).to have_field 'Key (PEM)', with: ''
end
end
end
diff --git a/spec/features/projects/pages_spec.rb b/spec/features/projects/pages_spec.rb
index d55e9d12801..3c4b5b2c4ca 100644
--- a/spec/features/projects/pages_spec.rb
+++ b/spec/features/projects/pages_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
shared_examples 'pages settings editing' do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project, pages_https_only: false) }
let(:user) { create(:user) }
let(:role) { :maintainer }
@@ -30,12 +30,52 @@ shared_examples 'pages settings editing' do
expect(page).to have_content('Access pages')
end
+ context 'when pages are disabled in the project settings' do
+ it 'renders disabled warning' do
+ project.project_feature.update!(pages_access_level: ProjectFeature::DISABLED)
+
+ visit project_pages_path(project)
+
+ expect(page).to have_content('GitLab Pages are disabled for this project')
+ end
+ end
+
it 'renders first deployment warning' do
visit project_pages_path(project)
expect(page).to have_content('It may take up to 30 minutes before the site is available after the first deployment.')
end
+ shared_examples 'does not render access control warning' do
+ it 'does not render access control warning' do
+ visit project_pages_path(project)
+
+ expect(page).not_to have_content('Access Control is enabled for this Pages website')
+ end
+ end
+
+ include_examples 'does not render access control warning'
+
+ context 'when access control is enabled in gitlab settings' do
+ before do
+ stub_pages_setting(access_control: true)
+ end
+
+ it 'renders access control warning' do
+ visit project_pages_path(project)
+
+ expect(page).to have_content('Access Control is enabled for this Pages website')
+ end
+
+ context 'when pages are public' do
+ before do
+ project.project_feature.update!(pages_access_level: ProjectFeature::PUBLIC)
+ end
+
+ include_examples 'does not render access control warning'
+ end
+ end
+
context 'when support for external domains is disabled' do
it 'renders message that support is disabled' do
visit project_pages_path(project)
@@ -93,7 +133,7 @@ shared_examples 'pages settings editing' do
end
end
- context 'when pages are exposed on external HTTPS address', :https_pages_enabled do
+ context 'when pages are exposed on external HTTPS address', :https_pages_enabled, :js do
let(:certificate_pem) do
<<~PEM
-----BEGIN CERTIFICATE-----
@@ -138,6 +178,11 @@ shared_examples 'pages settings editing' do
visit new_project_pages_domain_path(project)
fill_in 'Domain', with: 'my.test.domain.com'
+
+ if ::Gitlab::LetsEncrypt.enabled?
+ find('.js-auto-ssl-toggle-container .project-feature-toggle').click
+ end
+
fill_in 'Certificate (PEM)', with: certificate_pem
fill_in 'Key (PEM)', with: certificate_key
click_button 'Create New Domain'
@@ -145,27 +190,49 @@ shared_examples 'pages settings editing' do
expect(page).to have_content('my.test.domain.com')
end
+ describe 'with dns verification enabled' do
+ before do
+ stub_application_setting(pages_domain_verification_enabled: true)
+ end
+
+ it 'shows the DNS verification record' do
+ domain = create(:pages_domain, project: project)
+
+ visit project_pages_path(project)
+
+ within('#content-body') { click_link 'Edit' }
+ expect(page).to have_field :domain_verification, with: "#{domain.verification_domain} TXT #{domain.keyed_verification_code}"
+ end
+ end
+
describe 'updating the certificate for an existing domain' do
let!(:domain) do
- create(:pages_domain, project: project)
+ create(:pages_domain, project: project, auto_ssl_enabled: false)
end
it 'allows the certificate to be updated' do
visit project_pages_path(project)
- within('#content-body') { click_link 'Details' }
- click_link 'Edit'
+ within('#content-body') { click_link 'Edit' }
click_button 'Save Changes'
expect(page).to have_content('Domain was updated')
end
context 'when the certificate is invalid' do
+ let!(:domain) do
+ create(:pages_domain, :without_certificate, :without_key, project: project)
+ end
+
it 'tells the user what the problem is' do
visit project_pages_path(project)
- within('#content-body') { click_link 'Details' }
- click_link 'Edit'
+ within('#content-body') { click_link 'Edit' }
+
+ if ::Gitlab::LetsEncrypt.enabled?
+ find('.js-auto-ssl-toggle-container .project-feature-toggle').click
+ end
+
fill_in 'Certificate (PEM)', with: 'invalid data'
click_button 'Save Changes'
@@ -174,6 +241,27 @@ shared_examples 'pages settings editing' do
expect(page).to have_content("Key doesn't match the certificate")
end
end
+
+ it 'allows the certificate to be removed', :js do
+ visit project_pages_path(project)
+
+ within('#content-body') { click_link 'Edit' }
+
+ accept_confirm { click_link 'Remove' }
+
+ expect(page).to have_field('Certificate (PEM)', with: '')
+ expect(page).to have_field('Key (PEM)', with: '')
+ domain.reload
+ expect(domain.certificate).to be_nil
+ expect(domain.key).to be_nil
+ end
+
+ it 'shows the DNS CNAME record' do
+ visit project_pages_path(project)
+
+ within('#content-body') { click_link 'Edit' }
+ expect(page).to have_field :domain_dns, with: "#{domain.domain} CNAME #{domain.project.pages_subdomain}.#{Settings.pages.host}."
+ end
end
end
end
@@ -210,7 +298,7 @@ shared_examples 'pages settings editing' do
end
end
- describe 'HTTPS settings', :js, :https_pages_enabled do
+ describe 'HTTPS settings', :https_pages_enabled do
before do
project.namespace.update(owner: user)
@@ -318,18 +406,21 @@ shared_examples 'pages settings editing' do
expect(page).to have_link('Remove pages')
- click_link 'Remove pages'
+ accept_confirm { click_link 'Remove pages' }
- expect(project.pages_deployed?).to be_falsey
+ expect(page).to have_content('Pages were removed')
+ expect(project.reload.pages_deployed?).to be_falsey
end
end
end
end
-describe 'Pages' do
+describe 'Pages', :js do
include LetsEncryptHelpers
- include_examples 'pages settings editing'
+ context 'when editing normally' do
+ include_examples 'pages settings editing'
+ end
context 'when letsencrypt support is enabled' do
before do
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 04adb1ec6af..94fac9a2eb5 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -128,7 +128,7 @@ describe 'Pipeline', :js do
end
end
- it 'cancels the running build and shows retry button' do
+ it 'cancels the running build and shows retry button', :sidekiq_might_not_need_inline do
find('#ci-badge-deploy .ci-action-icon-container').click
page.within('#ci-badge-deploy') do
@@ -146,7 +146,7 @@ describe 'Pipeline', :js do
end
end
- it 'cancels the preparing build and shows retry button' do
+ it 'cancels the preparing build and shows retry button', :sidekiq_might_not_need_inline do
find('#ci-badge-deploy .ci-action-icon-container').click
page.within('#ci-badge-deploy') do
@@ -186,7 +186,7 @@ describe 'Pipeline', :js do
end
end
- it 'unschedules the delayed job and shows play button as a manual job' do
+ it 'unschedules the delayed job and shows play button as a manual job', :sidekiq_might_not_need_inline do
find('#ci-badge-delayed-job .ci-action-icon-container').click
page.within('#ci-badge-delayed-job') do
@@ -305,7 +305,9 @@ describe 'Pipeline', :js do
find('.js-retry-button').click
end
- it { expect(page).not_to have_content('Retry') }
+ it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
+ expect(page).not_to have_content('Retry')
+ end
end
end
@@ -321,7 +323,9 @@ describe 'Pipeline', :js do
click_on 'Cancel running'
end
- it { expect(page).not_to have_content('Cancel running') }
+ it 'does not show a "Cancel running" button', :sidekiq_might_not_need_inline do
+ expect(page).not_to have_content('Cancel running')
+ end
end
end
@@ -400,7 +404,7 @@ describe 'Pipeline', :js do
visit project_pipeline_path(source_project, pipeline)
end
- it 'shows the pipeline information' do
+ it 'shows the pipeline information', :sidekiq_might_not_need_inline do
within '.pipeline-info' do
expect(page).to have_content("#{pipeline.statuses.count} jobs " \
"for !#{merge_request.iid} " \
@@ -473,7 +477,7 @@ describe 'Pipeline', :js do
visit project_pipeline_path(source_project, pipeline)
end
- it 'shows the pipeline information' do
+ it 'shows the pipeline information', :sidekiq_might_not_need_inline do
within '.pipeline-info' do
expect(page).to have_content("#{pipeline.statuses.count} jobs " \
"for !#{merge_request.iid} " \
@@ -651,7 +655,9 @@ describe 'Pipeline', :js do
find('.js-retry-button').click
end
- it { expect(page).not_to have_content('Retry') }
+ it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
+ expect(page).not_to have_content('Retry')
+ end
end
end
@@ -663,7 +669,9 @@ describe 'Pipeline', :js do
click_on 'Cancel running'
end
- it { expect(page).not_to have_content('Cancel running') }
+ it 'does not show a "Cancel running" button', :sidekiq_might_not_need_inline do
+ expect(page).not_to have_content('Cancel running')
+ end
end
end
@@ -778,10 +786,10 @@ describe 'Pipeline', :js do
expect(page).to have_content(failed_build.stage)
end
- it 'does not show trace' do
+ it 'does not show log' do
subject
- expect(page).to have_content('No job trace')
+ expect(page).to have_content('No job log')
end
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 76d8ad1638b..f6eeb8d7065 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -133,14 +133,14 @@ describe 'Pipelines', :js do
wait_for_requests
end
- it 'indicated that pipelines was canceled' do
+ it 'indicated that pipelines was canceled', :sidekiq_might_not_need_inline do
expect(page).not_to have_selector('.js-pipelines-cancel-button')
expect(page).to have_selector('.ci-canceled')
end
end
end
- context 'when pipeline is retryable' do
+ context 'when pipeline is retryable', :sidekiq_might_not_need_inline do
let!(:build) do
create(:ci_build, pipeline: pipeline,
stage: 'test')
@@ -185,33 +185,29 @@ describe 'Pipelines', :js do
visit project_pipelines_path(source_project)
end
- shared_examples_for 'showing detached merge request pipeline information' do
- it 'shows detached tag for the pipeline' do
+ shared_examples_for 'detached merge request pipeline' do
+ it 'shows pipeline information without pipeline ref', :sidekiq_might_not_need_inline do
within '.pipeline-tags' do
expect(page).to have_content('detached')
end
- end
- it 'shows the link of the merge request' do
within '.branch-commit' do
expect(page).to have_link(merge_request.iid,
href: project_merge_request_path(project, merge_request))
end
- end
- it 'does not show the ref of the pipeline' do
within '.branch-commit' do
expect(page).not_to have_link(pipeline.ref)
end
end
end
- it_behaves_like 'showing detached merge request pipeline information'
+ it_behaves_like 'detached merge request pipeline'
context 'when source project is a forked project' do
let(:source_project) { fork_project(project, user, repository: true) }
- it_behaves_like 'showing detached merge request pipeline information'
+ it_behaves_like 'detached merge request pipeline'
end
end
@@ -233,20 +229,16 @@ describe 'Pipelines', :js do
end
shared_examples_for 'Correct merge request pipeline information' do
- it 'does not show detached tag for the pipeline' do
+ it 'does not show detached tag for the pipeline, and shows the link of the merge request, and does not show the ref of the pipeline', :sidekiq_might_not_need_inline do
within '.pipeline-tags' do
expect(page).not_to have_content('detached')
end
- end
- it 'shows the link of the merge request' do
within '.branch-commit' do
expect(page).to have_link(merge_request.iid,
href: project_merge_request_path(project, merge_request))
end
- end
- it 'does not show the ref of the pipeline' do
within '.branch-commit' do
expect(page).not_to have_link(pipeline.ref)
end
@@ -429,7 +421,7 @@ describe 'Pipelines', :js do
find('.js-modal-primary-action').click
end
- it 'indicates that pipeline was canceled' do
+ it 'indicates that pipeline was canceled', :sidekiq_might_not_need_inline do
expect(page).not_to have_selector('.js-pipelines-cancel-button')
expect(page).to have_selector('.ci-canceled')
end
@@ -452,7 +444,7 @@ describe 'Pipelines', :js do
expect(page).not_to have_selector('.js-pipelines-retry-button')
end
- it 'has failed pipeline' do
+ it 'has failed pipeline', :sidekiq_might_not_need_inline do
expect(page).to have_selector('.ci-failed')
end
end
diff --git a/spec/features/projects/settings/operations_settings_spec.rb b/spec/features/projects/settings/operations_settings_spec.rb
index d96e243d96b..9bbeb0eb260 100644
--- a/spec/features/projects/settings/operations_settings_spec.rb
+++ b/spec/features/projects/settings/operations_settings_spec.rb
@@ -102,5 +102,30 @@ describe 'Projects > Settings > For a forked project', :js do
end
end
end
+
+ context 'grafana integration settings form' do
+ it 'successfully fills and completes the form' do
+ visit project_settings_operations_path(project)
+
+ wait_for_requests
+
+ within '.js-grafana-integration' do
+ click_button('Expand')
+ end
+
+ expect(page).to have_content('Grafana URL')
+ expect(page).to have_content('API Token')
+ expect(page).to have_button('Save Changes')
+
+ fill_in('grafana-url', with: 'http://gitlab-test.grafana.net')
+ fill_in('grafana-token', with: 'token')
+
+ click_button('Save Changes')
+
+ wait_for_requests
+
+ assert_text('Your changes have been saved')
+ end
+ end
end
end
diff --git a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
index 9f09c5c4501..c0089e3c28c 100644
--- a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
+++ b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
@@ -107,4 +107,27 @@ describe 'Projects > Settings > User manages merge request settings' do
expect(project.printing_merge_request_link_enabled).to be(false)
end
end
+
+ describe 'Checkbox to remove source branch after merge', :js do
+ it 'is initially checked' do
+ checkbox = find_field('project_remove_source_branch_after_merge')
+ expect(checkbox).to be_checked
+ end
+
+ it 'when unchecked sets :remove_source_branch_after_merge to false' do
+ uncheck('project_remove_source_branch_after_merge')
+ within('.merge-request-settings-form') do
+ find('.qa-save-merge-request-changes')
+ click_on('Save changes')
+ end
+
+ find('.flash-notice')
+ checkbox = find_field('project_remove_source_branch_after_merge')
+
+ expect(checkbox).not_to be_checked
+
+ project.reload
+ expect(project.remove_source_branch_after_merge).to be(false)
+ end
+ end
end
diff --git a/spec/features/projects/show/user_sees_collaboration_links_spec.rb b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
index bbb3a066ed5..ff133b58f89 100644
--- a/spec/features/projects/show/user_sees_collaboration_links_spec.rb
+++ b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
@@ -2,12 +2,11 @@
require 'spec_helper'
-describe 'Projects > Show > Collaboration links' do
+describe 'Projects > Show > Collaboration links', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
before do
- stub_feature_flags(vue_file_list: false)
project.add_developer(user)
sign_in(user)
end
@@ -17,15 +16,21 @@ describe 'Projects > Show > Collaboration links' do
# The navigation bar
page.within('.header-new') do
+ find('.qa-new-menu-toggle').click
+
aggregate_failures 'dropdown links in the navigation bar' do
expect(page).to have_link('New issue')
expect(page).to have_link('New merge request')
expect(page).to have_link('New snippet', href: new_project_snippet_path(project))
end
+
+ find('.qa-new-menu-toggle').click
end
# The dropdown above the tree
page.within('.repo-breadcrumb') do
+ find('.qa-add-to-tree').click
+
aggregate_failures 'dropdown links above the repo tree' do
expect(page).to have_link('New file')
expect(page).to have_link('Upload file')
@@ -45,23 +50,19 @@ describe 'Projects > Show > Collaboration links' do
visit project_path(project)
page.within('.header-new') do
+ find('.qa-new-menu-toggle').click
+
aggregate_failures 'dropdown links' do
expect(page).not_to have_link('New issue')
expect(page).not_to have_link('New merge request')
expect(page).not_to have_link('New snippet', href: new_project_snippet_path(project))
end
- end
- page.within('.repo-breadcrumb') do
- aggregate_failures 'dropdown links' do
- expect(page).not_to have_link('New file')
- expect(page).not_to have_link('Upload file')
- expect(page).not_to have_link('New directory')
- expect(page).not_to have_link('New branch')
- expect(page).not_to have_link('New tag')
- end
+ find('.qa-new-menu-toggle').click
end
+ expect(page).not_to have_selector('.qa-add-to-tree')
+
expect(page).not_to have_link('Web IDE')
end
end
diff --git a/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb b/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
index fdc238d55cf..cf1a679102c 100644
--- a/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
+++ b/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
@@ -5,10 +5,6 @@ require 'spec_helper'
describe 'Projects > Show > User sees last commit CI status' do
set(:project) { create(:project, :repository, :public) }
- before do
- stub_feature_flags(vue_file_list: false)
- end
-
it 'shows the project README', :js do
project.enable_ci
pipeline = create(:ci_pipeline, project: project, sha: project.commit.sha, ref: 'master')
@@ -16,9 +12,9 @@ describe 'Projects > Show > User sees last commit CI status' do
visit project_path(project)
- page.within '.blob-commit-info' do
+ page.within '.commit-detail' do
expect(page).to have_content(project.commit.sha[0..6])
- expect(page).to have_link('Pipeline: skipped')
+ expect(page).to have_selector('[aria-label="Commit: skipped"]')
end
end
end
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index c136d7607fd..41c3c6b5770 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -59,8 +59,8 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
end
it '"Add license" button linked to new file populated for a license' do
- page.within('.project-stats') do
- expect(page).to have_link('Add license', href: presenter.add_license_path)
+ page.within('.project-buttons') do
+ expect(page).to have_link('Add LICENSE', href: presenter.add_license_path)
end
end
end
@@ -175,7 +175,7 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
expect(project.repository.license_blob).not_to be_nil
page.within('.project-buttons') do
- expect(page).not_to have_link('Add license')
+ expect(page).not_to have_link('Add LICENSE')
end
end
diff --git a/spec/features/projects/tree/tree_show_spec.rb b/spec/features/projects/tree/tree_show_spec.rb
index ca616be341d..180ffac4d4d 100644
--- a/spec/features/projects/tree/tree_show_spec.rb
+++ b/spec/features/projects/tree/tree_show_spec.rb
@@ -10,7 +10,6 @@ describe 'Projects tree', :js do
let(:test_sha) { '7975be0116940bf2ad4321f79d02a55c5f7779aa' }
before do
- stub_feature_flags(vue_file_list: false)
project.add_maintainer(user)
sign_in(user)
end
diff --git a/spec/features/projects/view_on_env_spec.rb b/spec/features/projects/view_on_env_spec.rb
index beb32104809..832985f1a30 100644
--- a/spec/features/projects/view_on_env_spec.rb
+++ b/spec/features/projects/view_on_env_spec.rb
@@ -9,9 +9,13 @@ describe 'View on environment', :js do
let(:user) { project.creator }
before do
+ stub_feature_flags(single_mr_diff_view: false)
+
project.add_maintainer(user)
end
+ it_behaves_like 'rendering a single diff version'
+
context 'when the branch has a route map' do
let(:route_map) do
<<-MAP.strip_heredoc
@@ -26,7 +30,7 @@ describe 'View on environment', :js do
user,
start_branch: branch_name,
branch_name: branch_name,
- commit_message: "Add .gitlab/route-map.yml",
+ commit_message: 'Add .gitlab/route-map.yml',
file_path: '.gitlab/route-map.yml',
file_content: route_map
).execute
@@ -37,9 +41,9 @@ describe 'View on environment', :js do
user,
start_branch: branch_name,
branch_name: branch_name,
- commit_message: "Update feature",
+ commit_message: 'Update feature',
file_path: file_path,
- file_content: "# Noop"
+ file_content: '# Noop'
).execute
end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index 67ae26d8d1e..90e48f3c230 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -6,10 +6,6 @@ describe 'Project' do
include ProjectForksHelper
include MobileHelpers
- before do
- stub_feature_flags(vue_file_list: false)
- end
-
describe 'creating from template' do
let(:user) { create(:user) }
let(:template) { Gitlab::ProjectTemplate.find(:rails) }
@@ -190,7 +186,7 @@ describe 'Project' do
sign_in user
end
- it 'shows a link to the source project when it is available' do
+ it 'shows a link to the source project when it is available', :sidekiq_might_not_need_inline do
visit project_path(forked_project)
expect(page).to have_content('Forked from')
@@ -206,7 +202,7 @@ describe 'Project' do
expect(page).not_to have_content('Forked from')
end
- it 'shows the name of the deleted project when the source was deleted' do
+ it 'shows the name of the deleted project when the source was deleted', :sidekiq_might_not_need_inline do
forked_project
Projects::DestroyService.new(base_project, base_project.owner).execute
@@ -218,7 +214,7 @@ describe 'Project' do
context 'a fork of a fork' do
let(:fork_of_fork) { fork_project(forked_project, user, repository: true) }
- it 'links to the base project if the source project is removed' do
+ it 'links to the base project if the source project is removed', :sidekiq_might_not_need_inline do
fork_of_fork
Projects::DestroyService.new(forked_project, user).execute
@@ -263,7 +259,7 @@ describe 'Project' do
expect(page).to have_selector '#confirm_name_input:focus'
end
- it 'removes a project' do
+ it 'removes a project', :sidekiq_might_not_need_inline do
expect { remove_with_confirm('Remove project', project.path) }.to change { Project.count }.by(-1)
expect(page).to have_content "Project '#{project.full_name}' is in the process of being deleted."
expect(Project.all.count).to be_zero
@@ -272,7 +268,7 @@ describe 'Project' do
end
end
- describe 'tree view (default view is set to Files)' do
+ describe 'tree view (default view is set to Files)', :js do
let(:user) { create(:user, project_view: 'files') }
let(:project) { create(:forked_project_with_submodules) }
@@ -285,19 +281,19 @@ describe 'Project' do
it 'has working links to files' do
click_link('PROCESS.md')
- expect(page.status_code).to eq(200)
+ expect(page).to have_selector('.file-holder')
end
it 'has working links to directories' do
click_link('encoding')
- expect(page.status_code).to eq(200)
+ expect(page).to have_selector('.breadcrumb-item', text: 'encoding')
end
it 'has working links to submodules' do
click_link('645f6c4c')
- expect(page.status_code).to eq(200)
+ expect(page).to have_selector('.qa-branches-select', text: '645f6c4c82fd3f5e06f67134450a570b795e55a6')
end
context 'for signed commit on default branch', :js do
diff --git a/spec/features/raven_js_spec.rb b/spec/features/raven_js_spec.rb
deleted file mode 100644
index 38699f0cc1b..00000000000
--- a/spec/features/raven_js_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe 'RavenJS' do
- let(:raven_path) { '/raven.chunk.js' }
-
- it 'does not load raven if sentry is disabled' do
- visit new_user_session_path
-
- expect(has_requested_raven).to eq(false)
- end
-
- it 'loads raven if sentry is enabled' do
- stub_sentry_settings
-
- visit new_user_session_path
-
- expect(has_requested_raven).to eq(true)
- end
-
- def has_requested_raven
- page.all('script', visible: false).one? do |elm|
- elm[:src] =~ /#{raven_path}$/
- end
- end
-end
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index 7e7c09e4a13..7b969aea547 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -26,10 +26,20 @@ describe 'User uses header search field', :js do
end
end
+ context 'when using the keyboard shortcut' do
+ before do
+ find('#search.js-autocomplete-disabled')
+ find('body').native.send_keys('s')
+ end
+
+ it 'shows the category search dropdown' do
+ expect(page).to have_selector('.dropdown-header', text: /#{scope_name}/i)
+ end
+ end
+
context 'when clicking the search field' do
before do
- page.find('#search').click
- wait_for_all_requests
+ page.find('#search.js-autocomplete-disabled').click
end
it 'shows category search dropdown' do
@@ -78,15 +88,21 @@ describe 'User uses header search field', :js do
end
context 'when entering text into the search field' do
- before do
+ it 'does not display the category search dropdown' do
page.within('.search-input-wrap') do
fill_in('search', with: scope_name.first(4))
end
- end
- it 'does not display the category search dropdown' do
expect(page).not_to have_selector('.dropdown-header', text: /#{scope_name}/i)
end
+
+ it 'hides the dropdown when there are no results' do
+ page.within('.search-input-wrap') do
+ fill_in('search', with: 'a_search_term_with_no_results')
+ end
+
+ expect(page).not_to have_selector('.dropdown-menu')
+ end
end
end
diff --git a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb
index 768b883a90e..9c1c81918fa 100644
--- a/spec/features/security/project/internal_access_spec.rb
+++ b/spec/features/security/project/internal_access_spec.rb
@@ -264,7 +264,9 @@ describe "Internal Project Access" do
before do
# Speed increase
- allow_any_instance_of(Project).to receive(:branches).and_return([])
+ allow_next_instance_of(Project) do |instance|
+ allow(instance).to receive(:branches).and_return([])
+ end
end
it { is_expected.to be_allowed_for(:admin) }
@@ -283,7 +285,9 @@ describe "Internal Project Access" do
before do
# Speed increase
- allow_any_instance_of(Project).to receive(:tags).and_return([])
+ allow_next_instance_of(Project) do |instance|
+ allow(instance).to receive(:tags).and_return([])
+ end
end
it { is_expected.to be_allowed_for(:admin) }
diff --git a/spec/features/security/project/private_access_spec.rb b/spec/features/security/project/private_access_spec.rb
index c2d44c05a22..dbaf97bc3fd 100644
--- a/spec/features/security/project/private_access_spec.rb
+++ b/spec/features/security/project/private_access_spec.rb
@@ -236,7 +236,9 @@ describe "Private Project Access" do
before do
# Speed increase
- allow_any_instance_of(Project).to receive(:branches).and_return([])
+ allow_next_instance_of(Project) do |instance|
+ allow(instance).to receive(:branches).and_return([])
+ end
end
it { is_expected.to be_allowed_for(:admin) }
@@ -255,7 +257,9 @@ describe "Private Project Access" do
before do
# Speed increase
- allow_any_instance_of(Project).to receive(:tags).and_return([])
+ allow_next_instance_of(Project) do |instance|
+ allow(instance).to receive(:tags).and_return([])
+ end
end
it { is_expected.to be_allowed_for(:admin) }
diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb
index 19f01257713..35cbc195f4f 100644
--- a/spec/features/security/project/public_access_spec.rb
+++ b/spec/features/security/project/public_access_spec.rb
@@ -477,7 +477,9 @@ describe "Public Project Access" do
before do
# Speed increase
- allow_any_instance_of(Project).to receive(:branches).and_return([])
+ allow_next_instance_of(Project) do |instance|
+ allow(instance).to receive(:branches).and_return([])
+ end
end
it { is_expected.to be_allowed_for(:admin) }
@@ -496,7 +498,9 @@ describe "Public Project Access" do
before do
# Speed increase
- allow_any_instance_of(Project).to receive(:tags).and_return([])
+ allow_next_instance_of(Project) do |instance|
+ allow(instance).to receive(:tags).and_return([])
+ end
end
it { is_expected.to be_allowed_for(:admin) }
diff --git a/spec/features/sentry_js_spec.rb b/spec/features/sentry_js_spec.rb
new file mode 100644
index 00000000000..b39c4f0a0ae
--- /dev/null
+++ b/spec/features/sentry_js_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Sentry' do
+ let(:sentry_path) { '/sentry.chunk.js' }
+
+ it 'does not load sentry if sentry is disabled' do
+ allow(Gitlab.config.sentry).to receive(:enabled).and_return(false)
+ visit new_user_session_path
+
+ expect(has_requested_sentry).to eq(false)
+ end
+
+ it 'loads sentry if sentry is enabled' do
+ stub_sentry_settings
+
+ visit new_user_session_path
+
+ expect(has_requested_sentry).to eq(true)
+ end
+
+ def has_requested_sentry
+ page.all('script', visible: false).one? do |elm|
+ elm[:src] =~ /#{sentry_path}$/
+ end
+ end
+end
diff --git a/spec/features/signed_commits_spec.rb b/spec/features/signed_commits_spec.rb
index 70e6978a7b6..f56bd055224 100644
--- a/spec/features/signed_commits_spec.rb
+++ b/spec/features/signed_commits_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe 'GPG signed commits' do
let(:project) { create(:project, :public, :repository) }
- it 'changes from unverified to verified when the user changes his email to match the gpg key' do
+ it 'changes from unverified to verified when the user changes his email to match the gpg key', :sidekiq_might_not_need_inline do
ref = GpgHelpers::SIGNED_AND_AUTHORED_SHA
user = create(:user, email: 'unrelated.user@example.org')
@@ -30,7 +30,7 @@ describe 'GPG signed commits' do
expect(page).to have_button 'Verified'
end
- it 'changes from unverified to verified when the user adds the missing gpg key' do
+ it 'changes from unverified to verified when the user adds the missing gpg key', :sidekiq_might_not_need_inline do
ref = GpgHelpers::SIGNED_AND_AUTHORED_SHA
user = create(:user, email: GpgHelpers::User1.emails.first)
@@ -152,4 +152,26 @@ describe 'GPG signed commits' do
end
end
end
+
+ context 'view signed commit on the tree view', :js do
+ shared_examples 'a commit with a signature' do
+ before do
+ visit project_tree_path(project, 'signed-commits')
+ end
+
+ it 'displays commit signature' do
+ expect(page).to have_button 'Unverified'
+
+ click_on 'Unverified'
+
+ within '.popover' do
+ expect(page).to have_content 'This commit was signed with an unverified signature'
+ end
+ end
+ end
+
+ context 'with vue tree view enabled' do
+ it_behaves_like 'a commit with a signature'
+ end
+ end
end
diff --git a/spec/features/tags/developer_deletes_tag_spec.rb b/spec/features/tags/developer_deletes_tag_spec.rb
index 82b416c3a7f..0fc62a578f9 100644
--- a/spec/features/tags/developer_deletes_tag_spec.rb
+++ b/spec/features/tags/developer_deletes_tag_spec.rb
@@ -39,8 +39,10 @@ describe 'Developer deletes tag' do
context 'when pre-receive hook fails', :js do
before do
- allow_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:rm_tag)
- .and_raise(Gitlab::Git::PreReceiveError, 'GitLab: Do not delete tags')
+ allow_next_instance_of(Gitlab::GitalyClient::OperationService) do |instance|
+ allow(instance).to receive(:rm_tag)
+ .and_raise(Gitlab::Git::PreReceiveError, 'GitLab: Do not delete tags')
+ end
end
it 'shows the error message' do
diff --git a/spec/features/unsubscribe_links_spec.rb b/spec/features/unsubscribe_links_spec.rb
index 2f8b715289c..cf30776786b 100644
--- a/spec/features/unsubscribe_links_spec.rb
+++ b/spec/features/unsubscribe_links_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Unsubscribe links' do
+describe 'Unsubscribe links', :sidekiq_might_not_need_inline do
include Warden::Test::Helpers
let(:recipient) { create(:user) }
diff --git a/spec/features/user_sees_revert_modal_spec.rb b/spec/features/user_sees_revert_modal_spec.rb
index 24b4f8dd4aa..c0cffe885de 100644
--- a/spec/features/user_sees_revert_modal_spec.rb
+++ b/spec/features/user_sees_revert_modal_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Merge request > User sees revert modal', :js do
+describe 'Merge request > User sees revert modal', :js, :sidekiq_might_not_need_inline do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/users/anonymous_sessions_spec.rb b/spec/features/users/anonymous_sessions_spec.rb
new file mode 100644
index 00000000000..e87ee39a3f4
--- /dev/null
+++ b/spec/features/users/anonymous_sessions_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Session TTLs', :clean_gitlab_redis_shared_state do
+ it 'creates a session with a short TTL when login fails' do
+ visit new_user_session_path
+ # The session key only gets created after a post
+ fill_in 'user_login', with: 'non-existant@gitlab.org'
+ fill_in 'user_password', with: '12345678'
+ click_button 'Sign in'
+
+ expect(page).to have_content('Invalid Login or password')
+
+ expect_single_session_with_expiration(Settings.gitlab['unauthenticated_session_expire_delay'])
+ end
+
+ it 'increases the TTL when the login succeeds' do
+ user = create(:user)
+ gitlab_sign_in(user)
+
+ expect(page).to have_content(user.name)
+
+ expect_single_session_with_expiration(Settings.gitlab['session_expire_delay'] * 60)
+ end
+
+ def expect_single_session_with_expiration(expiration)
+ session_keys = get_session_keys
+
+ expect(session_keys.size).to eq(1)
+ expect(get_ttl(session_keys.first)).to eq expiration
+ end
+
+ def get_session_keys
+ Gitlab::Redis::SharedState.with { |redis| redis.scan_each(match: 'session:gitlab:*').to_a }
+ end
+
+ def get_ttl(key)
+ Gitlab::Redis::SharedState.with { |redis| redis.ttl(key) }
+ end
+end
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index d1f3b3f4076..b7c54bb6de8 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -806,7 +806,7 @@ describe 'Login' do
gitlab_sign_in(user)
expect(current_path).to eq root_path
- expect(page).to have_content("Please check your email (#{user.email}) to verify that you own this address.")
+ expect(page).to have_content("Please check your email (#{user.email}) to verify that you own this address and unlock the power of CI/CD.")
end
context "when not having confirmed within Devise's allow_unconfirmed_access_for time" do
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 562d6fcab1b..3b19bd423a4 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -222,7 +222,7 @@ shared_examples 'Signup' do
expect(current_path).to eq users_sign_up_welcome_path
else
expect(current_path).to eq dashboard_projects_path
- expect(page).to have_content("Please check your email (#{new_user.email}) to verify that you own this address.")
+ expect(page).to have_content("Please check your email (#{new_user.email}) to verify that you own this address and unlock the power of CI/CD.")
end
end
end
@@ -379,7 +379,9 @@ shared_examples 'Signup' do
before do
InvisibleCaptcha.timestamp_enabled = true
stub_application_setting(recaptcha_enabled: true)
- allow_any_instance_of(RegistrationsController).to receive(:verify_recaptcha).and_return(false)
+ allow_next_instance_of(RegistrationsController) do |instance|
+ allow(instance).to receive(:verify_recaptcha).and_return(false)
+ end
end
after do
@@ -413,6 +415,7 @@ end
describe 'With original flow' do
before do
stub_experiment(signup_flow: false)
+ stub_experiment_for_user(signup_flow: false)
end
it_behaves_like 'Signup'
@@ -421,6 +424,7 @@ end
describe 'With experimental flow' do
before do
stub_experiment(signup_flow: true)
+ stub_experiment_for_user(signup_flow: true)
end
it_behaves_like 'Signup'
@@ -439,11 +443,13 @@ describe 'With experimental flow' do
fill_in 'user_name', with: 'New name'
select 'Software Developer', from: 'user_role'
+ choose 'user_setup_for_company_true'
click_button 'Get started!'
new_user = User.find_by_username(new_user.username)
expect(new_user.name).to eq 'New name'
expect(new_user.software_developer_role?).to be_truthy
+ expect(new_user.setup_for_company).to be_truthy
expect(page).to have_current_path(new_project_path)
end
end
diff --git a/spec/finders/abuse_reports_finder_spec.rb b/spec/finders/abuse_reports_finder_spec.rb
new file mode 100644
index 00000000000..c84a645ca08
--- /dev/null
+++ b/spec/finders/abuse_reports_finder_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe AbuseReportsFinder, '#execute' do
+ let(:params) { {} }
+ let!(:user1) { create(:user) }
+ let!(:user2) { create(:user) }
+ let!(:abuse_report_1) { create(:abuse_report, user: user1) }
+ let!(:abuse_report_2) { create(:abuse_report, user: user2) }
+
+ subject { described_class.new(params).execute }
+
+ context 'empty params' do
+ it 'returns all abuse reports' do
+ expect(subject).to match_array([abuse_report_1, abuse_report_2])
+ end
+ end
+
+ context 'params[:user_id] is present' do
+ let(:params) { { user_id: user2 } }
+
+ it 'returns abuse reports for the specified user' do
+ expect(subject).to match_array([abuse_report_2])
+ end
+ end
+end
diff --git a/spec/finders/branches_finder_spec.rb b/spec/finders/branches_finder_spec.rb
index 1a33bdf11d7..70b5da0cc3c 100644
--- a/spec/finders/branches_finder_spec.rb
+++ b/spec/finders/branches_finder_spec.rb
@@ -73,58 +73,76 @@ describe BranchesFinder do
expect(result.count).to eq(3)
expect(result.map(&:name)).to eq(%w{csv fix lfs})
end
- end
- context 'filter and sort' do
- it 'filters branches by name and sorts by recently_updated' do
- params = { sort: 'updated_desc', search: 'feat' }
+ it 'filters branches by name that begins with' do
+ params = { search: '^feature_' }
branches_finder = described_class.new(repository, params)
result = branches_finder.execute
expect(result.first.name).to eq('feature_conflict')
- expect(result.count).to eq(2)
+ expect(result.count).to eq(1)
end
- it 'filters branches by name and sorts by recently_updated, with exact matches first' do
- params = { sort: 'updated_desc', search: 'feature' }
+ it 'filters branches by name that ends with' do
+ params = { search: 'feature$' }
branches_finder = described_class.new(repository, params)
result = branches_finder.execute
expect(result.first.name).to eq('feature')
- expect(result.second.name).to eq('feature_conflict')
- expect(result.count).to eq(2)
+ expect(result.count).to eq(1)
end
- it 'filters branches by name and sorts by last_updated' do
- params = { sort: 'updated_asc', search: 'feature' }
+ it 'filters branches by nonexistent name that begins with' do
+ params = { search: '^nope' }
branches_finder = described_class.new(repository, params)
result = branches_finder.execute
- expect(result.first.name).to eq('feature')
- expect(result.count).to eq(2)
+ expect(result.count).to eq(0)
end
- it 'filters branches by name that begins with' do
- params = { search: '^feature_' }
+ it 'filters branches by nonexistent name that ends with' do
+ params = { search: 'nope$' }
+ branches_finder = described_class.new(repository, params)
+
+ result = branches_finder.execute
+
+ expect(result.count).to eq(0)
+ end
+ end
+
+ context 'filter and sort' do
+ it 'filters branches by name and sorts by recently_updated' do
+ params = { sort: 'updated_desc', search: 'feat' }
branches_finder = described_class.new(repository, params)
result = branches_finder.execute
expect(result.first.name).to eq('feature_conflict')
- expect(result.count).to eq(1)
+ expect(result.count).to eq(2)
end
- it 'filters branches by name that ends with' do
- params = { search: 'feature$' }
+ it 'filters branches by name and sorts by recently_updated, with exact matches first' do
+ params = { sort: 'updated_desc', search: 'feature' }
branches_finder = described_class.new(repository, params)
result = branches_finder.execute
expect(result.first.name).to eq('feature')
- expect(result.count).to eq(1)
+ expect(result.second.name).to eq('feature_conflict')
+ expect(result.count).to eq(2)
+ end
+
+ it 'filters branches by name and sorts by last_updated' do
+ params = { sort: 'updated_asc', search: 'feature' }
+ branches_finder = described_class.new(repository, params)
+
+ result = branches_finder.execute
+
+ expect(result.first.name).to eq('feature')
+ expect(result.count).to eq(2)
end
end
end
diff --git a/spec/finders/container_repositories_finder_spec.rb b/spec/finders/container_repositories_finder_spec.rb
index deec62d6598..08c241186d6 100644
--- a/spec/finders/container_repositories_finder_spec.rb
+++ b/spec/finders/container_repositories_finder_spec.rb
@@ -3,42 +3,50 @@
require 'spec_helper'
describe ContainerRepositoriesFinder do
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+
let(:group) { create(:group) }
let(:project) { create(:project, group: group) }
- let(:project_repository) { create(:container_repository, project: project) }
+ let!(:project_repository) { create(:container_repository, project: project) }
+
+ before do
+ group.add_reporter(reporter)
+ project.add_reporter(reporter)
+ end
describe '#execute' do
- let(:id) { nil }
+ context 'with authorized user' do
+ subject { described_class.new(user: reporter, subject: subject_object).execute }
- subject { described_class.new(id: id, container_type: container_type).execute }
+ context 'when subject_type is group' do
+ let(:subject_object) { group }
+ let(:other_project) { create(:project, group: group) }
- context 'when container_type is group' do
- let(:other_project) { create(:project, group: group) }
+ let(:other_repository) do
+ create(:container_repository, name: 'test_repository2', project: other_project)
+ end
- let(:other_repository) do
- create(:container_repository, name: 'test_repository2', project: other_project)
+ it { is_expected.to match_array([project_repository, other_repository]) }
end
- let(:container_type) { :group }
- let(:id) { group.id }
+ context 'when subject_type is project' do
+ let(:subject_object) { project }
- it { is_expected.to match_array([project_repository, other_repository]) }
- end
+ it { is_expected.to match_array([project_repository]) }
+ end
- context 'when container_type is project' do
- let(:container_type) { :project }
- let(:id) { project.id }
+ context 'with invalid subject_type' do
+ let(:subject_object) { "invalid type" }
- it { is_expected.to match_array([project_repository]) }
+ it { expect { subject }.to raise_exception('invalid subject_type') }
+ end
end
- context 'with invalid id' do
- let(:container_type) { :project }
- let(:id) { 123456789 }
+ context 'with unauthorized user' do
+ subject { described_class.new(user: guest, subject: group).execute }
- it 'raises an error' do
- expect { subject.execute }.to raise_error(ActiveRecord::RecordNotFound)
- end
+ it { is_expected.to be nil }
end
end
end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index c27ce263bf0..6c10a617279 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -163,6 +163,20 @@ describe IssuesFinder do
end
end
+ context 'filtering by nonexistent author ID and issue term using CTE for search' do
+ let(:params) do
+ {
+ author_id: 'does-not-exist',
+ search: 'git',
+ attempt_group_search_optimizations: true
+ }
+ end
+
+ it 'returns no results' do
+ expect(issues).to be_empty
+ end
+ end
+
context 'filtering by milestone' do
let(:params) { { milestone_title: milestone.title } }
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index a396284f1e9..bc85a622119 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -23,6 +23,18 @@ describe MergeRequestsFinder do
expect(merge_requests).to contain_exactly(merge_request1)
end
+ it 'filters by nonexistent author ID and MR term using CTE for search' do
+ params = {
+ author_id: 'does-not-exist',
+ search: 'git',
+ attempt_group_search_optimizations: true
+ }
+
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests).to be_empty
+ end
+
it 'filters by projects' do
params = { projects: [project2.id, project3.id] }
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index 4ec12b5a7f7..a9344cd593a 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-describe ProjectsFinder do
+describe ProjectsFinder, :do_not_mock_admin_mode do
+ include AdminModeHelper
+
describe '#execute' do
let(:user) { create(:user) }
let(:group) { create(:group, :public) }
@@ -56,6 +58,31 @@ describe ProjectsFinder do
it { is_expected.to eq([internal_project]) }
end
+ describe 'with id_after' do
+ context 'only returns projects with a project id greater than given' do
+ let(:params) { { id_after: internal_project.id }}
+
+ it { is_expected.to eq([public_project]) }
+ end
+ end
+
+ describe 'with id_before' do
+ context 'only returns projects with a project id less than given' do
+ let(:params) { { id_before: public_project.id }}
+
+ it { is_expected.to eq([internal_project]) }
+ end
+ end
+
+ describe 'with both id_before and id_after' do
+ context 'only returns projects with a project id less than given' do
+ let!(:projects) { create_list(:project, 5, :public) }
+ let(:params) { { id_after: projects.first.id, id_before: projects.last.id }}
+
+ it { is_expected.to contain_exactly(*projects[1..-2]) }
+ end
+ end
+
describe 'filter by visibility_level' do
before do
private_project.add_maintainer(user)
@@ -188,5 +215,21 @@ describe ProjectsFinder do
it { is_expected.to eq([internal_project, public_project]) }
end
+
+ describe 'with admin user' do
+ let(:user) { create(:admin) }
+
+ context 'admin mode enabled' do
+ before do
+ enable_admin_mode!(current_user)
+ end
+
+ it { is_expected.to match_array([public_project, internal_project, private_project, shared_project]) }
+ end
+
+ context 'admin mode disabled' do
+ it { is_expected.to match_array([public_project, internal_project]) }
+ end
+ end
end
end
diff --git a/spec/finders/prometheus_metrics_finder_spec.rb b/spec/finders/prometheus_metrics_finder_spec.rb
new file mode 100644
index 00000000000..41b2e700e1e
--- /dev/null
+++ b/spec/finders/prometheus_metrics_finder_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PrometheusMetricsFinder do
+ describe '#execute' do
+ let(:finder) { described_class.new(params) }
+ let(:params) { {} }
+
+ subject { finder.execute }
+
+ context 'with params' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project_metric) { create(:prometheus_metric, project: project) }
+ let_it_be(:common_metric) { create(:prometheus_metric, :common) }
+ let_it_be(:unique_metric) do
+ create(
+ :prometheus_metric,
+ :common,
+ title: 'Unique title',
+ y_label: 'Unique y_label',
+ group: :kubernetes,
+ identifier: 'identifier',
+ created_at: 5.minutes.ago
+ )
+ end
+
+ context 'with appropriate indexes' do
+ before do
+ allow_any_instance_of(described_class).to receive(:appropriate_index?).and_return(true)
+ end
+
+ context 'with project' do
+ let(:params) { { project: project } }
+
+ it { is_expected.to eq([project_metric]) }
+ end
+
+ context 'with group' do
+ let(:params) { { group: project_metric.group } }
+
+ it { is_expected.to contain_exactly(common_metric, project_metric) }
+ end
+
+ context 'with title' do
+ let(:params) { { title: project_metric.title } }
+
+ it { is_expected.to contain_exactly(project_metric, common_metric) }
+ end
+
+ context 'with y_label' do
+ let(:params) { { y_label: project_metric.y_label } }
+
+ it { is_expected.to contain_exactly(project_metric, common_metric) }
+ end
+
+ context 'with common' do
+ let(:params) { { common: true } }
+
+ it { is_expected.to contain_exactly(common_metric, unique_metric) }
+ end
+
+ context 'with ordered' do
+ let(:params) { { ordered: true } }
+
+ it { is_expected.to eq([unique_metric, project_metric, common_metric]) }
+ end
+
+ context 'with indentifier' do
+ let(:params) { { identifier: unique_metric.identifier } }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ ArgumentError,
+ ':identifier must be scoped to a :project or :common'
+ )
+ end
+
+ context 'with common' do
+ let(:params) { { identifier: unique_metric.identifier, common: true } }
+
+ it { is_expected.to contain_exactly(unique_metric) }
+ end
+
+ context 'with id' do
+ let(:params) { { id: 14, identifier: 'string' } }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ ArgumentError,
+ 'Only one of :identifier, :id is permitted'
+ )
+ end
+ end
+ end
+
+ context 'with id' do
+ let(:params) { { id: common_metric.id } }
+
+ it { is_expected.to contain_exactly(common_metric) }
+ end
+
+ context 'with multiple params' do
+ let(:params) do
+ {
+ group: project_metric.group,
+ title: project_metric.title,
+ y_label: project_metric.y_label,
+ common: true,
+ ordered: true
+ }
+ end
+
+ it { is_expected.to contain_exactly(common_metric) }
+ end
+ end
+
+ context 'without an appropriate index' do
+ let(:params) do
+ {
+ title: project_metric.title,
+ ordered: true
+ }
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ ArgumentError,
+ 'An index should exist for params: [:title]'
+ )
+ end
+ end
+ end
+
+ context 'without params' do
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ ArgumentError,
+ 'Please provide one or more of: [:project, :group, :title, :y_label, :identifier, :id, :common, :ordered]'
+ )
+ end
+ end
+ end
+end
diff --git a/spec/finders/releases_finder_spec.rb b/spec/finders/releases_finder_spec.rb
index 5ffb8c74bf5..b9c67361f45 100644
--- a/spec/finders/releases_finder_spec.rb
+++ b/spec/finders/releases_finder_spec.rb
@@ -8,8 +8,7 @@ describe ReleasesFinder do
let(:repository) { project.repository }
let(:v1_0_0) { create(:release, project: project, tag: 'v1.0.0') }
let(:v1_1_0) { create(:release, project: project, tag: 'v1.1.0') }
-
- subject { described_class.new(project, user)}
+ let(:finder) { described_class.new(project, user) }
before do
v1_0_0.update_attribute(:released_at, 2.days.ago)
@@ -17,11 +16,13 @@ describe ReleasesFinder do
end
describe '#execute' do
+ subject { finder.execute(**args) }
+
+ let(:args) { {} }
+
context 'when the user is not part of the project' do
it 'returns no releases' do
- releases = subject.execute
-
- expect(releases).to be_empty
+ is_expected.to be_empty
end
end
@@ -31,11 +32,25 @@ describe ReleasesFinder do
end
it 'sorts by release date' do
- releases = subject.execute
+ is_expected.to be_present
+ expect(subject.size).to eq(2)
+ expect(subject).to eq([v1_1_0, v1_0_0])
+ end
+
+ it 'preloads associations' do
+ expect(Release).to receive(:preloaded).once.and_call_original
+
+ subject
+ end
+
+ context 'when preload is false' do
+ let(:args) { { preload: false } }
+
+ it 'does not preload associations' do
+ expect(Release).not_to receive(:preloaded)
- expect(releases).to be_present
- expect(releases.size).to eq(2)
- expect(releases).to eq([v1_1_0, v1_0_0])
+ subject
+ end
end
end
end
diff --git a/spec/finders/tags_finder_spec.rb b/spec/finders/tags_finder_spec.rb
index 85f970b71c4..e9f29ab2441 100644
--- a/spec/finders/tags_finder_spec.rb
+++ b/spec/finders/tags_finder_spec.rb
@@ -54,6 +54,44 @@ describe TagsFinder do
expect(result.count).to eq(0)
end
+
+ it 'filters tags by name that begins with' do
+ params = { search: '^v1.0' }
+ tags_finder = described_class.new(repository, params)
+
+ result = tags_finder.execute
+
+ expect(result.first.name).to eq('v1.0.0')
+ expect(result.count).to eq(1)
+ end
+
+ it 'filters tags by name that ends with' do
+ params = { search: '0.0$' }
+ tags_finder = described_class.new(repository, params)
+
+ result = tags_finder.execute
+
+ expect(result.first.name).to eq('v1.0.0')
+ expect(result.count).to eq(1)
+ end
+
+ it 'filters tags by nonexistent name that begins with' do
+ params = { search: '^nope' }
+ tags_finder = described_class.new(repository, params)
+
+ result = tags_finder.execute
+
+ expect(result.count).to eq(0)
+ end
+
+ it 'filters tags by nonexistent name that ends with' do
+ params = { search: 'nope$' }
+ tags_finder = described_class.new(repository, params)
+
+ result = tags_finder.execute
+
+ expect(result.count).to eq(0)
+ end
end
context 'filter and sort' do
diff --git a/spec/finders/todos_finder_spec.rb b/spec/finders/todos_finder_spec.rb
index 044e135fa0b..a837e7af251 100644
--- a/spec/finders/todos_finder_spec.rb
+++ b/spec/finders/todos_finder_spec.rb
@@ -36,10 +36,18 @@ describe TodosFinder do
expect(todos).to match_array([todo1, todo2])
end
- it 'returns correct todos when filtered by a type' do
- todos = finder.new(user, { type: 'Issue' }).execute
+ context 'when filtering by type' do
+ it 'returns correct todos when filtered by a type' do
+ todos = finder.new(user, { type: 'Issue' }).execute
- expect(todos).to match_array([todo1])
+ expect(todos).to match_array([todo1])
+ end
+
+ it 'returns the correct todos when filtering for multiple types' do
+ todos = finder.new(user, { type: %w[Issue MergeRequest] }).execute
+
+ expect(todos).to match_array([todo1, todo2])
+ end
end
context 'when filtering for actions' do
@@ -53,12 +61,10 @@ describe TodosFinder do
expect(todos).to match_array([todo2])
end
- context 'multiple actions' do
- it 'returns the expected todos' do
- todos = finder.new(user, { action_id: [Todo::DIRECTLY_ADDRESSED, Todo::ASSIGNED] }).execute
+ it 'returns the expected todos when filtering for multiple action ids' do
+ todos = finder.new(user, { action_id: [Todo::DIRECTLY_ADDRESSED, Todo::ASSIGNED] }).execute
- expect(todos).to match_array([todo2, todo1])
- end
+ expect(todos).to match_array([todo2, todo1])
end
end
@@ -69,12 +75,10 @@ describe TodosFinder do
expect(todos).to match_array([todo2])
end
- context 'multiple actions' do
- it 'returns the expected todos' do
- todos = finder.new(user, { action: [:directly_addressed, :assigned] }).execute
+ it 'returns the expected todos when filtering for multiple action names' do
+ todos = finder.new(user, { action: [:directly_addressed, :assigned] }).execute
- expect(todos).to match_array([todo2, todo1])
- end
+ expect(todos).to match_array([todo2, todo1])
end
end
end
@@ -136,6 +140,51 @@ describe TodosFinder do
end
end
end
+
+ context 'by state' do
+ let!(:todo1) { create(:todo, user: user, group: group, target: issue, state: :done) }
+ let!(:todo2) { create(:todo, user: user, group: group, target: issue, state: :pending) }
+
+ it 'returns the expected items when no state is provided' do
+ todos = finder.new(user, {}).execute
+
+ expect(todos).to match_array([todo2])
+ end
+
+ it 'returns the expected items when a state is provided' do
+ todos = finder.new(user, { state: :done }).execute
+
+ expect(todos).to match_array([todo1])
+ end
+
+ it 'returns the expected items when multiple states are provided' do
+ todos = finder.new(user, { state: [:pending, :done] }).execute
+
+ expect(todos).to match_array([todo1, todo2])
+ end
+ end
+
+ context 'by project' do
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:project3) { create(:project) }
+
+ let!(:todo1) { create(:todo, user: user, project: project1, state: :pending) }
+ let!(:todo2) { create(:todo, user: user, project: project2, state: :pending) }
+ let!(:todo3) { create(:todo, user: user, project: project3, state: :pending) }
+
+ it 'returns the expected todos for one project' do
+ todos = finder.new(user, { project_id: project2.id }).execute
+
+ expect(todos).to match_array([todo2])
+ end
+
+ it 'returns the expected todos for many projects' do
+ todos = finder.new(user, { project_id: [project2.id, project1.id] }).execute
+
+ expect(todos).to match_array([todo2, todo1])
+ end
+ end
end
context 'external authorization' do
@@ -207,6 +256,19 @@ describe TodosFinder do
end
end
+ describe '.todo_types' do
+ it 'returns the expected types' do
+ expected_result =
+ if Gitlab.ee?
+ %w[Epic Issue MergeRequest]
+ else
+ %w[Issue MergeRequest]
+ end
+
+ expect(described_class.todo_types).to contain_exactly(*expected_result)
+ end
+ end
+
describe '#any_for_target?' do
it 'returns true if there are any todos for the given target' do
todo = create(:todo, :pending)
diff --git a/spec/fixtures/api/schemas/cluster_status.json b/spec/fixtures/api/schemas/cluster_status.json
index 695175689b9..f978baa2026 100644
--- a/spec/fixtures/api/schemas/cluster_status.json
+++ b/spec/fixtures/api/schemas/cluster_status.json
@@ -35,7 +35,9 @@
"external_ip": { "type": ["string", "null"] },
"external_hostname": { "type": ["string", "null"] },
"hostname": { "type": ["string", "null"] },
+ "kibana_hostname": { "type": ["string", "null"] },
"email": { "type": ["string", "null"] },
+ "stack": { "type": ["string", "null"] },
"update_available": { "type": ["boolean", "null"] },
"can_uninstall": { "type": "boolean" }
},
diff --git a/spec/fixtures/api/schemas/entities/merge_request_sidebar_extras.json b/spec/fixtures/api/schemas/entities/merge_request_sidebar_extras.json
index 682e345d5f5..11076ec73de 100644
--- a/spec/fixtures/api/schemas/entities/merge_request_sidebar_extras.json
+++ b/spec/fixtures/api/schemas/entities/merge_request_sidebar_extras.json
@@ -3,6 +3,8 @@
"properties" : {
"id": { "type": "integer" },
"iid": { "type": "integer" },
+ "project_emails_disabled": { "type": "boolean" },
+ "subscribe_disabled_description": { "type": "string" },
"subscribed": { "type": "boolean" },
"time_estimate": { "type": "integer" },
"total_time_spent": { "type": "integer" },
diff --git a/spec/fixtures/api/schemas/error_tracking/error.json b/spec/fixtures/api/schemas/error_tracking/error.json
index df2c02d7d5d..3f65105681e 100644
--- a/spec/fixtures/api/schemas/error_tracking/error.json
+++ b/spec/fixtures/api/schemas/error_tracking/error.json
@@ -4,7 +4,14 @@
"external_url",
"last_seen",
"message",
- "type"
+ "type",
+ "title",
+ "project_id",
+ "project_name",
+ "project_slug",
+ "short_id",
+ "status",
+ "frequency"
],
"properties" : {
"id": { "type": "string"},
@@ -15,7 +22,14 @@
"culprit": { "type": "string" },
"count": { "type": "integer"},
"external_url": { "type": "string" },
- "user_count": { "type": "integer"}
+ "user_count": { "type": "integer"},
+ "title": { "type": "string"},
+ "project_id": { "type": "string"},
+ "project_name": { "type": "string"},
+ "project_slug": { "type": "string"},
+ "short_id": { "type": "string"},
+ "status": { "type": "string"},
+ "frequency": { "type": "array"}
},
- "additionalProperties": true
+ "additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/error_tracking/error_detailed.json b/spec/fixtures/api/schemas/error_tracking/error_detailed.json
new file mode 100644
index 00000000000..40d6773f0e6
--- /dev/null
+++ b/spec/fixtures/api/schemas/error_tracking/error_detailed.json
@@ -0,0 +1,45 @@
+{
+ "type": "object",
+ "required" : [
+ "external_url",
+ "external_base_url",
+ "last_seen",
+ "message",
+ "type",
+ "title",
+ "project_id",
+ "project_name",
+ "project_slug",
+ "short_id",
+ "status",
+ "frequency",
+ "first_release_last_commit",
+ "last_release_last_commit",
+ "first_release_short_version",
+ "last_release_short_version"
+ ],
+ "properties" : {
+ "id": { "type": "string"},
+ "first_seen": { "type": "string", "format": "date-time" },
+ "last_seen": { "type": "string", "format": "date-time" },
+ "type": { "type": "string" },
+ "message": { "type": "string" },
+ "culprit": { "type": "string" },
+ "count": { "type": "integer"},
+ "external_url": { "type": "string" },
+ "external_base_url": { "type": "string" },
+ "user_count": { "type": "integer"},
+ "title": { "type": "string"},
+ "project_id": { "type": "string"},
+ "project_name": { "type": "string"},
+ "project_slug": { "type": "string"},
+ "short_id": { "type": "string"},
+ "status": { "type": "string"},
+ "frequency": { "type": "array"},
+ "first_release_last_commit": { "type": ["string", "null"] },
+ "last_release_last_commit": { "type": ["string", "null"] },
+ "first_release_short_version": { "type": ["string", "null"] },
+ "last_release_short_version": { "type": ["string", "null"] }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/error_tracking/error_stack_trace.json b/spec/fixtures/api/schemas/error_tracking/error_stack_trace.json
new file mode 100644
index 00000000000..a684dd0496a
--- /dev/null
+++ b/spec/fixtures/api/schemas/error_tracking/error_stack_trace.json
@@ -0,0 +1,14 @@
+{
+ "type": "object",
+ "required": [
+ "issue_id",
+ "stack_trace_entries",
+ "date_received"
+ ],
+ "properties": {
+ "issue_id": { "type": ["string", "integer"] },
+ "stack_trace_entries": { "type": "object" },
+ "date_received": { "type": "string" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/error_tracking/issue_detailed.json b/spec/fixtures/api/schemas/error_tracking/issue_detailed.json
new file mode 100644
index 00000000000..b5adea6fc62
--- /dev/null
+++ b/spec/fixtures/api/schemas/error_tracking/issue_detailed.json
@@ -0,0 +1,11 @@
+{
+ "type": "object",
+ "required": [
+ "error"
+ ],
+ "properties": {
+ "error": { "$ref": "error_detailed.json" }
+ },
+ "additionalProperties": false
+}
+
diff --git a/spec/fixtures/api/schemas/error_tracking/issue_stack_trace.json b/spec/fixtures/api/schemas/error_tracking/issue_stack_trace.json
new file mode 100644
index 00000000000..7ec1ae63609
--- /dev/null
+++ b/spec/fixtures/api/schemas/error_tracking/issue_stack_trace.json
@@ -0,0 +1,11 @@
+{
+ "type": "object",
+ "required": [
+ "error"
+ ],
+ "properties": {
+ "error": { "$ref": "error_stack_trace.json" }
+ },
+ "additionalProperties": false
+}
+
diff --git a/spec/fixtures/api/schemas/public_api/v4/blobs.json b/spec/fixtures/api/schemas/public_api/v4/blobs.json
index a812815838f..5dcefb42367 100644
--- a/spec/fixtures/api/schemas/public_api/v4/blobs.json
+++ b/spec/fixtures/api/schemas/public_api/v4/blobs.json
@@ -5,6 +5,7 @@
"properties" : {
"basename": { "type": "string" },
"data": { "type": "string" },
+ "path": { "type": ["string"] },
"filename": { "type": ["string"] },
"id": { "type": ["string", "null"] },
"project_id": { "type": "integer" },
@@ -12,7 +13,7 @@
"startline": { "type": "integer" }
},
"required": [
- "basename", "data", "filename", "id", "ref", "startline", "project_id"
+ "basename", "data", "path", "filename", "id", "ref", "startline", "project_id"
],
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json b/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json
index ed8ed9085c0..721b8d4641f 100644
--- a/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json
+++ b/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json
@@ -7,6 +7,7 @@
"verified": { "type": "boolean" },
"verification_code": { "type": ["string", "null"] },
"enabled_until": { "type": ["date", "null"] },
+ "auto_ssl_enabled": { "type": "boolean" },
"certificate_expiration": {
"type": "object",
"properties": {
@@ -17,6 +18,6 @@
"additionalProperties": false
}
},
- "required": ["domain", "url", "project_id", "verified", "verification_code", "enabled_until"],
+ "required": ["domain", "url", "project_id", "verified", "verification_code", "enabled_until", "auto_ssl_enabled"],
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json b/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json
index b57d544f896..3dd80a6f11b 100644
--- a/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json
+++ b/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json
@@ -6,6 +6,7 @@
"verified": { "type": "boolean" },
"verification_code": { "type": ["string", "null"] },
"enabled_until": { "type": ["date", "null"] },
+ "auto_ssl_enabled": { "type": "boolean" },
"certificate": {
"type": "object",
"properties": {
@@ -18,6 +19,6 @@
"additionalProperties": false
}
},
- "required": ["domain", "url", "verified", "verification_code", "enabled_until"],
+ "required": ["domain", "url", "verified", "verification_code", "enabled_until", "auto_ssl_enabled"],
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/release.json b/spec/fixtures/api/schemas/public_api/v4/release.json
index 2bdc8bc711c..c83eefeb7ed 100644
--- a/spec/fixtures/api/schemas/public_api/v4/release.json
+++ b/spec/fixtures/api/schemas/public_api/v4/release.json
@@ -38,10 +38,11 @@
"additionalProperties": false
},
"_links": {
- "required": ["merge_requests_url", "issues_url"],
+ "required": ["merge_requests_url", "issues_url", "edit_url"],
"properties": {
"merge_requests_url": { "type": "string" },
- "issues_url": { "type": "string" }
+ "issues_url": { "type": "string" },
+ "edit_url": { "type": "string"}
}
}
},
diff --git a/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json b/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
index bce74892059..dd65a4c7cdb 100644
--- a/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
+++ b/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
@@ -26,10 +26,11 @@
"additionalProperties": false
},
"_links": {
- "required": ["merge_requests_url", "issues_url"],
+ "required": ["merge_requests_url", "issues_url", "edit_url"],
"properties": {
"merge_requests_url": { "type": "string" },
- "issues_url": { "type": "string" }
+ "issues_url": { "type": "string" },
+ "edit_url": { "type": "string"}
}
}
},
diff --git a/spec/fixtures/api/schemas/release.json b/spec/fixtures/api/schemas/release.json
index 86f0f27606c..b0296e5e62d 100644
--- a/spec/fixtures/api/schemas/release.json
+++ b/spec/fixtures/api/schemas/release.json
@@ -1,9 +1,10 @@
{
"type": "object",
- "required": ["name", "tag_name"],
+ "required": ["tag_name", "description"],
"properties": {
"name": { "type": "string" },
"tag_name": { "type": "string" },
+    "ref": { "type": "string" },
"description": { "type": "string" },
"description_html": { "type": "string" },
"created_at": { "type": "date" },
diff --git a/spec/fixtures/grafana/dashboard_response.json b/spec/fixtures/grafana/dashboard_response.json
new file mode 100644
index 00000000000..c0dd77e2fdc
--- /dev/null
+++ b/spec/fixtures/grafana/dashboard_response.json
@@ -0,0 +1,764 @@
+{
+ "meta": {
+ "type": "db",
+ "canSave": true,
+ "canEdit": true,
+ "canAdmin": true,
+ "canStar": true,
+ "slug": "gitlab-omnibus-redis",
+ "url": "/-/grafana/d/XDaNK6amz/gitlab-omnibus-redis",
+ "expires": "0001-01-01T00:00:00Z",
+ "created": "2019-10-04T13:43:20Z",
+ "updated": "2019-10-04T13:43:20Z",
+ "updatedBy": "Anonymous",
+ "createdBy": "Anonymous",
+ "version": 1,
+ "hasAcl": false,
+ "isFolder": false,
+ "folderId": 1,
+ "folderTitle": "GitLab Omnibus",
+ "folderUrl": "/-/grafana/dashboards/f/l2EpNh2Zk/gitlab-omnibus",
+ "provisioned": true,
+ "provisionedExternalId": "redis.json"
+ },
+ "dashboard": {
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations \u0026 Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "description": "GitLab Omnibus dashboard for Redis servers",
+ "editable": true,
+ "gnetId": 763,
+ "graphTooltip": 0,
+ "id": 3,
+ "iteration": 1556027798221,
+ "links": [],
+ "panels": [
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": ["rgba(245, 54, 54, 0.9)", "rgba(237, 129, 40, 0.89)", "rgba(50, 172, 45, 0.97)"],
+ "datasource": "GitLab Omnibus",
+ "decimals": 0,
+ "editable": true,
+ "error": false,
+ "format": "dtdurations",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "gridPos": { "h": 3, "w": 4, "x": 0, "y": 0 },
+ "id": 9,
+ "interval": null,
+ "isNew": true,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ { "name": "value to text", "value": 1 },
+ { "name": "range to text", "value": 2 }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [{ "from": "null", "text": "N/A", "to": "null" }],
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "addr",
+ "targets": [
+ {
+ "expr": "avg(time() - redis_start_time_seconds{instance=~\"$instance\"})",
+ "format": "time_series",
+ "instant": true,
+ "interval": "",
+ "intervalFactor": 2,
+ "legendFormat": "",
+ "metric": "",
+ "refId": "A",
+ "step": 1800
+ }
+ ],
+ "thresholds": "",
+ "title": "Uptime",
+ "type": "singlestat",
+ "valueFontSize": "70%",
+ "valueMaps": [{ "op": "=", "text": "N/A", "value": "null" }],
+ "valueName": "current"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": ["rgba(245, 54, 54, 0.9)", "rgba(237, 129, 40, 0.89)", "rgba(50, 172, 45, 0.97)"],
+ "datasource": "GitLab Omnibus",
+ "decimals": 0,
+ "editable": true,
+ "error": false,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "gridPos": { "h": 3, "w": 4, "x": 4, "y": 0 },
+ "hideTimeOverride": true,
+ "id": 12,
+ "interval": null,
+ "isNew": true,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ { "name": "value to text", "value": 1 },
+ { "name": "range to text", "value": 2 }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [{ "from": "null", "text": "N/A", "to": "null" }],
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": true
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "expr": "sum(\n avg_over_time(redis_connected_clients{instance=~\"$instance\"}[$__interval])\n)",
+ "format": "time_series",
+ "interval": "1m",
+ "intervalFactor": 2,
+ "legendFormat": "",
+ "metric": "",
+ "refId": "A",
+ "step": 2
+ }
+ ],
+ "thresholds": "",
+ "timeFrom": "1m",
+ "timeShift": null,
+ "title": "Clients",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [{ "op": "=", "text": "N/A", "value": "null" }],
+ "valueName": "avg"
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "GitLab Omnibus",
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "gridPos": { "h": 6, "w": 8, "x": 8, "y": 0 },
+ "id": 2,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": false,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "paceLength": 10,
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(\n rate(redis_commands_processed_total{instance=~\"$instance\"}[$__interval])\n)",
+ "format": "time_series",
+ "interval": "1m",
+ "intervalFactor": 2,
+ "legendFormat": "",
+ "metric": "A",
+ "refId": "A",
+ "step": 240,
+ "target": ""
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Commands Executed",
+ "tooltip": { "msResolution": false, "shared": true, "sort": 0, "value_type": "cumulative" },
+ "type": "graph",
+ "xaxis": { "buckets": null, "mode": "time", "name": null, "show": true, "values": [] },
+ "yaxes": [
+ { "format": "reqps", "label": null, "logBase": 1, "max": null, "min": "0", "show": true },
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": null, "show": true }
+ ],
+ "yaxis": { "align": false, "alignLevel": null }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "GitLab Omnibus",
+ "decimals": 2,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "gridPos": { "h": 6, "w": 8, "x": 16, "y": 0 },
+ "id": 1,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": false,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "paceLength": 10,
+ "percentage": true,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(\n rate(redis_keyspace_hits_total{instance=~\"$instance\"}[$__interval])\n)",
+ "format": "time_series",
+ "hide": false,
+ "interval": "1m",
+ "intervalFactor": 1,
+ "legendFormat": "hits",
+ "metric": "",
+ "refId": "A",
+ "step": 240,
+ "target": ""
+ },
+ {
+ "expr": "sum(\n rate(redis_keyspace_misses_total{instance=~\"$instance\"}[$__interval])\n)",
+ "format": "time_series",
+ "hide": false,
+ "interval": "1m",
+ "intervalFactor": 1,
+ "legendFormat": "misses",
+ "metric": "",
+ "refId": "B",
+ "step": 240,
+ "target": ""
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Hits, Misses per Second",
+ "tooltip": { "msResolution": false, "shared": true, "sort": 0, "value_type": "individual" },
+ "type": "graph",
+ "xaxis": { "buckets": null, "mode": "time", "name": null, "show": true, "values": [] },
+ "yaxes": [
+ { "format": "short", "label": "", "logBase": 1, "max": null, "min": 0, "show": true },
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": null, "show": true }
+ ],
+ "yaxis": { "align": false, "alignLevel": null }
+ },
+ {
+ "aliasColors": { "max": "#BF1B00" },
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "GitLab Omnibus",
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "gridPos": { "h": 10, "w": 8, "x": 0, "y": 3 },
+ "id": 7,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "null as zero",
+ "paceLength": 10,
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [{ "alias": "/max - .*/", "dashes": true }],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "redis_memory_used_bytes{instance=~\"$instance\"}",
+ "format": "time_series",
+ "intervalFactor": 2,
+ "legendFormat": "used - {{instance}}",
+ "metric": "",
+ "refId": "A",
+ "step": 240,
+ "target": ""
+ },
+ {
+ "expr": "redis_config_maxmemory{instance=~\"$instance\"} \u003e 0",
+ "format": "time_series",
+ "hide": false,
+ "intervalFactor": 2,
+ "legendFormat": "max - {{instance}}",
+ "refId": "B",
+ "step": 240
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Memory Usage",
+ "tooltip": { "msResolution": false, "shared": true, "sort": 0, "value_type": "cumulative" },
+ "type": "graph",
+ "xaxis": { "buckets": null, "mode": "time", "name": null, "show": true, "values": [] },
+ "yaxes": [
+ { "format": "bytes", "label": null, "logBase": 1, "max": null, "min": 0, "show": true },
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": null, "show": true }
+ ],
+ "yaxis": { "align": false, "alignLevel": null }
+ },
+ {
+ "aliasColors": {
+ "evicts": "#890F02",
+ "memcached_items_evicted_total{instance=\"172.17.0.1:9150\",job=\"prometheus\"}": "#890F02",
+ "reclaims": "#3F6833"
+ },
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "GitLab Omnibus",
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "gridPos": { "h": 7, "w": 8, "x": 8, "y": 6 },
+ "id": 8,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "paceLength": 10,
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [{ "alias": "reclaims", "yaxis": 2 }],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(rate(redis_expired_keys_total{instance=~\"$instance\"}[$__interval]))",
+ "format": "time_series",
+ "interval": "1m",
+ "intervalFactor": 2,
+ "legendFormat": "expired - {{ test_attribute }}",
+ "metric": "",
+ "refId": "A",
+ "step": 240,
+ "target": ""
+ },
+ {
+ "expr": "sum(rate(redis_evicted_keys_total{instance=~\"$instance\"}[$__interval]))",
+ "format": "time_series",
+ "interval": "1m",
+ "intervalFactor": 2,
+ "legendFormat": "evicted",
+ "refId": "B",
+ "step": 240
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Expired / Evicted",
+ "tooltip": { "msResolution": false, "shared": true, "sort": 0, "value_type": "cumulative" },
+ "type": "graph",
+ "xaxis": { "buckets": null, "mode": "time", "name": null, "show": true, "values": [] },
+ "yaxes": [
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": "0", "show": true },
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": null, "show": true }
+ ],
+ "yaxis": { "align": false, "alignLevel": null }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "GitLab Omnibus",
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "gridPos": { "h": 7, "w": 8, "x": 16, "y": 6 },
+ "id": 10,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "paceLength": 10,
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(\n rate(redis_net_input_bytes_total{instance=~\"$instance\"}[$__interval])\n)",
+ "format": "time_series",
+ "interval": "1m",
+ "intervalFactor": 2,
+ "legendFormat": "In",
+ "refId": "A",
+ "step": 240
+ },
+ {
+ "expr": "sum(\n rate(redis_net_output_bytes_total{instance=~\"$instance\"}[$__interval])\n)",
+ "format": "time_series",
+ "interval": "1m",
+ "intervalFactor": 2,
+ "legendFormat": "Out",
+ "refId": "B",
+ "step": 240
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Network I/O",
+ "tooltip": { "msResolution": true, "shared": true, "sort": 0, "value_type": "cumulative" },
+ "type": "graph",
+ "xaxis": { "buckets": null, "mode": "time", "name": null, "show": true, "values": [] },
+ "yaxes": [
+ { "format": "Bps", "label": null, "logBase": 1, "max": null, "min": "0", "show": true },
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": null, "show": true }
+ ],
+ "yaxis": { "align": false, "alignLevel": null }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "GitLab Omnibus",
+ "editable": true,
+ "error": false,
+ "fill": 8,
+ "grid": {},
+ "gridPos": { "h": 7, "w": 16, "x": 0, "y": 13 },
+ "id": 14,
+ "isNew": true,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": true,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "connected",
+ "paceLength": 10,
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": true,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum without (instance) (\n rate(redis_commands_total{instance=~\"$instance\"}[$__interval])\n) \u003e 0",
+ "format": "time_series",
+ "interval": "1m",
+ "intervalFactor": 2,
+ "legendFormat": "{{ cmd }}",
+ "metric": "redis_command_calls_total",
+ "refId": "A",
+ "step": 240
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Command Calls / sec",
+ "tooltip": { "msResolution": true, "shared": true, "sort": 2, "value_type": "individual" },
+ "type": "graph",
+ "xaxis": { "buckets": null, "mode": "time", "name": null, "show": true, "values": [] },
+ "yaxes": [
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": "0", "show": true },
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": null, "show": true }
+ ],
+ "yaxis": { "align": false, "alignLevel": null }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "GitLab Omnibus",
+ "editable": true,
+ "error": false,
+ "fill": 7,
+ "grid": {},
+ "gridPos": { "h": 7, "w": 8, "x": 16, "y": 13 },
+ "id": 13,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "paceLength": 10,
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": true,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(redis_db_keys{instance=~\"$instance\"} - redis_db_keys_expiring{instance=~\"$instance\"}) ",
+ "format": "time_series",
+ "interval": "",
+ "intervalFactor": 2,
+ "legendFormat": "not expiring",
+ "refId": "A",
+ "step": 240,
+ "target": ""
+ },
+ {
+ "expr": "sum(redis_db_keys_expiring{instance=~\"$instance\"})",
+ "format": "time_series",
+ "interval": "",
+ "intervalFactor": 2,
+ "legendFormat": "expiring",
+ "metric": "",
+ "refId": "B",
+ "step": 240
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Expiring vs Not-Expiring Keys",
+ "tooltip": { "msResolution": false, "shared": true, "sort": 0, "value_type": "individual" },
+ "type": "graph",
+ "xaxis": { "buckets": null, "mode": "time", "name": null, "show": true, "values": [] },
+ "yaxes": [
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": "0", "show": true },
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": null, "show": true }
+ ],
+ "yaxis": { "align": false, "alignLevel": null }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "GitLab Omnibus",
+ "editable": true,
+ "error": false,
+ "fill": 7,
+ "grid": {},
+ "gridPos": { "h": 7, "w": 16, "x": 0, "y": 20 },
+ "id": 5,
+ "isNew": true,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "paceLength": 10,
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": true,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum by (db) (\n redis_db_keys{instance=~\"$instance\"}\n)",
+ "format": "time_series",
+ "interval": "",
+ "intervalFactor": 2,
+ "legendFormat": "{{ db }} ",
+ "refId": "A",
+ "step": 240,
+ "target": ""
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Items per DB",
+ "tooltip": { "msResolution": false, "shared": true, "sort": 0, "value_type": "individual" },
+ "type": "graph",
+ "xaxis": { "buckets": null, "mode": "time", "name": null, "show": true, "values": [] },
+ "yaxes": [
+ { "format": "none", "label": null, "logBase": 1, "max": null, "min": "0", "show": true },
+ { "format": "short", "label": null, "logBase": 1, "max": null, "min": null, "show": true }
+ ],
+ "yaxis": { "align": false, "alignLevel": null }
+ }
+ ],
+ "refresh": "1m",
+ "schemaVersion": 18,
+ "style": "dark",
+ "tags": ["redis"],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "current": { "tags": [], "text": "All", "value": "$__all" },
+ "datasource": "GitLab Omnibus",
+ "definition": "",
+ "hide": 0,
+ "includeAll": true,
+ "label": null,
+ "multi": false,
+ "name": "instance",
+ "options": [],
+ "query": "label_values(up{job=\"redis\"}, instance)",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": { "from": "now-24h", "to": "now" },
+ "timepicker": {
+ "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"],
+ "time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"]
+ },
+ "timezone": "",
+ "title": "GitLab Omnibus - Redis",
+ "uid": "XDaNK6amz",
+ "version": 1
+ }
+}
diff --git a/spec/fixtures/grafana/datasource_response.json b/spec/fixtures/grafana/datasource_response.json
new file mode 100644
index 00000000000..07c075beb35
--- /dev/null
+++ b/spec/fixtures/grafana/datasource_response.json
@@ -0,0 +1,21 @@
+{
+ "id": 1,
+ "orgId": 1,
+ "name": "GitLab Omnibus",
+ "type": "prometheus",
+ "typeLogoUrl": "",
+ "access": "proxy",
+ "url": "http://localhost:9090",
+ "password": "",
+ "user": "",
+ "database": "",
+ "basicAuth": false,
+ "basicAuthUser": "",
+ "basicAuthPassword": "",
+ "withCredentials": false,
+ "isDefault": true,
+ "jsonData": {},
+ "secureJsonFields": {},
+ "version": 1,
+ "readOnly": true
+}
diff --git a/spec/fixtures/grafana/expected_grafana_embed.json b/spec/fixtures/grafana/expected_grafana_embed.json
new file mode 100644
index 00000000000..72fb5477b9e
--- /dev/null
+++ b/spec/fixtures/grafana/expected_grafana_embed.json
@@ -0,0 +1,27 @@
+{
+ "panel_groups": [
+ {
+ "panels": [
+ {
+ "title": "Network I/O",
+ "type": "area-chart",
+ "y_label": "",
+ "metrics": [
+ {
+ "id": "In_0",
+ "query_range": "sum( rate(redis_net_input_bytes_total{instance=~\"localhost:9121\"}[1m]))",
+ "label": "In",
+ "prometheus_endpoint_path": "/foo/bar/-/grafana/proxy/1/api/v1/query_range?query=sum%28++rate%28redis_net_input_bytes_total%7Binstance%3D~%22localhost%3A9121%22%7D%5B1m%5D%29%29"
+ },
+ {
+ "id": "Out_1",
+ "query_range": "sum( rate(redis_net_output_bytes_total{instance=~\"localhost:9121\"}[1m]))",
+ "label": "Out",
+ "prometheus_endpoint_path": "/foo/bar/-/grafana/proxy/1/api/v1/query_range?query=sum%28++rate%28redis_net_output_bytes_total%7Binstance%3D~%22localhost%3A9121%22%7D%5B1m%5D%29%29"
+ }
+ ]
+ }
+ ]
+ }
+ ]
+}
diff --git a/spec/fixtures/grafana/proxy_response.json b/spec/fixtures/grafana/proxy_response.json
new file mode 100644
index 00000000000..b9f34abcaaf
--- /dev/null
+++ b/spec/fixtures/grafana/proxy_response.json
@@ -0,0 +1,459 @@
+{
+ "status": "success",
+ "data": {
+ "resultType": "matrix",
+ "result": [
+ {
+ "metric": {
+ "test_attribute": "test-attribute-value"
+ },
+ "values": [
+ [1570768177, "54"],
+ [1570768237, "54"],
+ [1570768297, "54"],
+ [1570768357, "54"],
+ [1570768417, "54"],
+ [1570768477, "54"],
+ [1570768537, "54"],
+ [1570768597, "54"],
+ [1570768657, "54"],
+ [1570768717, "54"],
+ [1570768777, "54"],
+ [1570768837, "54"],
+ [1570768897, "54"],
+ [1570768957, "54"],
+ [1570769017, "54"],
+ [1570769077, "54"],
+ [1570769377, "54"],
+ [1570769437, "54"],
+ [1570769497, "54"],
+ [1570769557, "54"],
+ [1570769617, "54"],
+ [1570769677, "54"],
+ [1570769737, "54"],
+ [1570769797, "54"],
+ [1570769857, "54"],
+ [1570769917, "54"],
+ [1570769977, "54"],
+ [1570770037, "54"],
+ [1570770097, "54"],
+ [1570770157, "54"],
+ [1570770217, "54"],
+ [1570770277, "54"],
+ [1570770337, "54"],
+ [1570770397, "54"],
+ [1570770457, "54"],
+ [1570770517, "54"],
+ [1570770577, "54"],
+ [1570770637, "54"],
+ [1570770697, "54"],
+ [1570770757, "54"],
+ [1570770817, "54"],
+ [1570770877, "54"],
+ [1570770937, "54"],
+ [1570770997, "54"],
+ [1570771057, "54"],
+ [1570771117, "54"],
+ [1570771177, "54"],
+ [1570771237, "54"],
+ [1570771297, "54"],
+ [1570771357, "54"],
+ [1570771417, "54"],
+ [1570771477, "54"],
+ [1570771537, "54"],
+ [1570771597, "54"],
+ [1570771657, "54"],
+ [1570771717, "54"],
+ [1570771777, "54"],
+ [1570771837, "54"],
+ [1570771897, "54"],
+ [1570771957, "54"],
+ [1570772017, "54"],
+ [1570772077, "54"],
+ [1570772137, "54"],
+ [1570772197, "54"],
+ [1570772257, "54"],
+ [1570772317, "54"],
+ [1570772377, "54"],
+ [1570772437, "54"],
+ [1570772497, "54"],
+ [1570772557, "54"],
+ [1570772617, "54"],
+ [1570772677, "54"],
+ [1570772737, "54"],
+ [1570772797, "54"],
+ [1570772857, "54"],
+ [1570772917, "54"],
+ [1570772977, "54"],
+ [1570773037, "54"],
+ [1570773097, "54"],
+ [1570773157, "54"],
+ [1570773217, "54"],
+ [1570773277, "54"],
+ [1570773337, "54"],
+ [1570773397, "54"],
+ [1570773457, "54"],
+ [1570773517, "54"],
+ [1570773577, "54"],
+ [1570773637, "54"],
+ [1570773697, "54"],
+ [1570773757, "54"],
+ [1570773817, "54"],
+ [1570773877, "54"],
+ [1570773937, "54"],
+ [1570773997, "54"],
+ [1570774057, "54"],
+ [1570774117, "54"],
+ [1570774177, "54"],
+ [1570774237, "54"],
+ [1570774297, "54"],
+ [1570774357, "54"],
+ [1570774417, "54"],
+ [1570774477, "54"],
+ [1570774537, "54"],
+ [1570774597, "54"],
+ [1570774657, "54"],
+ [1570774717, "54"],
+ [1570774777, "54"],
+ [1570774837, "54"],
+ [1570774897, "54"],
+ [1570774957, "54"],
+ [1570775017, "54"],
+ [1570775077, "54"],
+ [1570775137, "54"],
+ [1570776937, "54"],
+ [1570776997, "54"],
+ [1570777057, "54"],
+ [1570777117, "54"],
+ [1570777177, "54"],
+ [1570777237, "54"],
+ [1570777297, "54"],
+ [1570777357, "54"],
+ [1570777417, "54"],
+ [1570777477, "54"],
+ [1570777537, "54"],
+ [1570777597, "54"],
+ [1570777657, "54"],
+ [1570777717, "54"],
+ [1570778017, "54"],
+ [1570778077, "54"],
+ [1570778137, "54"],
+ [1570778197, "54"],
+ [1570778257, "54"],
+ [1570778317, "54"],
+ [1570778377, "54"],
+ [1570778437, "54"],
+ [1570778497, "54"],
+ [1570778557, "54"],
+ [1570778617, "54"],
+ [1570778677, "54"],
+ [1570778737, "54"],
+ [1570778797, "54"],
+ [1570778857, "54"],
+ [1570778917, "54"],
+ [1570778977, "54"],
+ [1570779037, "54"],
+ [1570779097, "54"],
+ [1570779157, "54"],
+ [1570779217, "54"],
+ [1570779277, "54"],
+ [1570779337, "54"],
+ [1570779397, "54"],
+ [1570779457, "54"],
+ [1570779517, "54"],
+ [1570779577, "54"],
+ [1570779637, "54"],
+ [1570779697, "54"],
+ [1570779757, "54"],
+ [1570779817, "54"],
+ [1570779877, "54"],
+ [1570779937, "54"],
+ [1570779997, "54"],
+ [1570780057, "54"],
+ [1570780117, "54"],
+ [1570780177, "54"],
+ [1570780237, "54"],
+ [1570780297, "54"],
+ [1570780357, "54"],
+ [1570780417, "54"],
+ [1570780477, "54"],
+ [1570780537, "54"],
+ [1570780597, "54"],
+ [1570780657, "54"],
+ [1570780717, "54"],
+ [1570780777, "54"],
+ [1570780837, "54"],
+ [1570780897, "54"],
+ [1570780957, "54"],
+ [1570781017, "54"],
+ [1570781077, "54"],
+ [1570781137, "54"],
+ [1570781197, "54"],
+ [1570781257, "54"],
+ [1570781317, "54"],
+ [1570781377, "54"],
+ [1570781437, "54"],
+ [1570781497, "54"],
+ [1570781557, "54"],
+ [1570781617, "54"],
+ [1570781677, "54"],
+ [1570781737, "54"],
+ [1570781797, "54"],
+ [1570781857, "54"],
+ [1570781917, "54"],
+ [1570781977, "54"],
+ [1570782037, "54"],
+ [1570782097, "54"],
+ [1570782157, "54"],
+ [1570782217, "54"],
+ [1570782277, "54"],
+ [1570782337, "54"],
+ [1570782397, "54"],
+ [1570782457, "54"],
+ [1570782517, "54"],
+ [1570782577, "54"],
+ [1570782637, "54"],
+ [1570782697, "54"],
+ [1570782757, "54"],
+ [1570782817, "54"],
+ [1570782877, "54"],
+ [1570782937, "54"],
+ [1570782997, "54"],
+ [1570783057, "54"],
+ [1570783117, "54"],
+ [1570783177, "54"],
+ [1570783237, "54"],
+ [1570783297, "54"],
+ [1570783357, "54"],
+ [1570783417, "54"],
+ [1570783477, "54"],
+ [1570783537, "54"],
+ [1570783597, "54"],
+ [1570783657, "54"],
+ [1570783717, "54"],
+ [1570783777, "54"],
+ [1570783837, "54"],
+ [1570783897, "54"],
+ [1570783957, "54"],
+ [1570784017, "54"],
+ [1570784077, "54"],
+ [1570784137, "54"],
+ [1570784197, "54"],
+ [1570784257, "54"],
+ [1570784317, "54"],
+ [1570784377, "54"],
+ [1570784437, "54"],
+ [1570784497, "54"],
+ [1570784557, "54"],
+ [1570784617, "54"],
+ [1570784677, "54"],
+ [1570784737, "54"],
+ [1570784797, "54"],
+ [1570784857, "54"],
+ [1570784917, "54"],
+ [1570784977, "54"],
+ [1570785037, "54"],
+ [1570785097, "54"],
+ [1570785157, "54"],
+ [1570785217, "54"],
+ [1570785277, "54"],
+ [1570785337, "54"],
+ [1570785397, "54"],
+ [1570785457, "54"],
+ [1570785517, "54"],
+ [1570785577, "54"],
+ [1570785637, "54"],
+ [1570785697, "54"],
+ [1570785757, "54"],
+ [1570785817, "54"],
+ [1570785877, "54"],
+ [1570785937, "54"],
+ [1570785997, "54"],
+ [1570786057, "54"],
+ [1570786117, "54"],
+ [1570786177, "54"],
+ [1570786237, "54"],
+ [1570786297, "54"],
+ [1570786357, "54"],
+ [1570786417, "54"],
+ [1570786477, "54"],
+ [1570786537, "54"],
+ [1570786597, "54"],
+ [1570786657, "54"],
+ [1570786717, "54"],
+ [1570786777, "54"],
+ [1570786837, "54"],
+ [1570786897, "54"],
+ [1570786957, "53"],
+ [1570787017, "54"],
+ [1570787077, "54"],
+ [1570787137, "54"],
+ [1570787197, "54"],
+ [1570787257, "54"],
+ [1570787317, "54"],
+ [1570787377, "54"],
+ [1570787437, "54"],
+ [1570787497, "54"],
+ [1570787557, "54"],
+ [1570787617, "54"],
+ [1570787677, "54"],
+ [1570787737, "54"],
+ [1570787797, "54"],
+ [1570787857, "54"],
+ [1570787917, "54"],
+ [1570787977, "54"],
+ [1570788037, "54"],
+ [1570788097, "54"],
+ [1570788157, "54"],
+ [1570788217, "54"],
+ [1570788277, "54"],
+ [1570788337, "54"],
+ [1570788397, "54"],
+ [1570788457, "54"],
+ [1570788517, "54"],
+ [1570788577, "54"],
+ [1570788637, "54"],
+ [1570788697, "54"],
+ [1570788757, "54"],
+ [1570788817, "54"],
+ [1570788877, "54"],
+ [1570788937, "54"],
+ [1570788997, "54"],
+ [1570789057, "54"],
+ [1570789117, "54"],
+ [1570789177, "54"],
+ [1570789237, "54"],
+ [1570789297, "54"],
+ [1570789357, "54"],
+ [1570789417, "54"],
+ [1570789477, "54"],
+ [1570789537, "54"],
+ [1570789597, "54"],
+ [1570789657, "54"],
+ [1570789717, "54"],
+ [1570789777, "54"],
+ [1570789837, "54"],
+ [1570789897, "54"],
+ [1570789957, "54"],
+ [1570790017, "54"],
+ [1570790077, "54"],
+ [1570790137, "54"],
+ [1570790197, "54"],
+ [1570790257, "54"],
+ [1570790317, "54"],
+ [1570790377, "54"],
+ [1570790437, "54"],
+ [1570790497, "54"],
+ [1570790557, "54"],
+ [1570790617, "54"],
+ [1570790677, "54"],
+ [1570790737, "54"],
+ [1570790797, "54"],
+ [1570790857, "54"],
+ [1570790917, "54"],
+ [1570790977, "54"],
+ [1570791037, "54"],
+ [1570791097, "54"],
+ [1570791157, "54"],
+ [1570791217, "54"],
+ [1570791277, "54"],
+ [1570791337, "54"],
+ [1570791397, "54"],
+ [1570791457, "54"],
+ [1570791517, "54"],
+ [1570791577, "54"],
+ [1570791637, "54"],
+ [1570791697, "54"],
+ [1570791757, "54"],
+ [1570791817, "54"],
+ [1570791877, "54"],
+ [1570791937, "54"],
+ [1570791997, "54"],
+ [1570792057, "54"],
+ [1570792117, "54"],
+ [1570792177, "54"],
+ [1570792237, "54"],
+ [1570792297, "54"],
+ [1570792357, "54"],
+ [1570792417, "54"],
+ [1570792477, "54"],
+ [1570792537, "54"],
+ [1570792597, "54"],
+ [1570792657, "54"],
+ [1570792717, "54"],
+ [1570792777, "54"],
+ [1570792837, "54"],
+ [1570792897, "54"],
+ [1570792957, "54"],
+ [1570793017, "54"],
+ [1570793077, "54"],
+ [1570793137, "54"],
+ [1570793197, "54"],
+ [1570793257, "54"],
+ [1570793317, "54"],
+ [1570793377, "54"],
+ [1570793437, "54"],
+ [1570793497, "54"],
+ [1570793557, "54"],
+ [1570793617, "54"],
+ [1570793677, "54"],
+ [1570793737, "54"],
+ [1570793797, "54"],
+ [1570793857, "54"],
+ [1570793917, "54"],
+ [1570793977, "54"],
+ [1570794037, "54"],
+ [1570794097, "54"],
+ [1570794157, "54"],
+ [1570794217, "54"],
+ [1570794277, "54"],
+ [1570794337, "54"],
+ [1570794397, "54"],
+ [1570794457, "54"],
+ [1570794517, "54"],
+ [1570794577, "54"],
+ [1570794637, "54"],
+ [1570794697, "54"],
+ [1570794757, "54"],
+ [1570794817, "54"],
+ [1570794877, "54"],
+ [1570794937, "54"],
+ [1570794997, "54"],
+ [1570795057, "54"],
+ [1570795117, "54"],
+ [1570795177, "54"],
+ [1570795237, "54"],
+ [1570795297, "54"],
+ [1570795357, "54"],
+ [1570795417, "54"],
+ [1570795477, "54"],
+ [1570795537, "54"],
+ [1570795597, "54"],
+ [1570795657, "54"],
+ [1570795717, "54"],
+ [1570795777, "54"],
+ [1570795837, "54"],
+ [1570795897, "54"],
+ [1570795957, "54"],
+ [1570796017, "54"],
+ [1570796077, "54"],
+ [1570796137, "54"],
+ [1570796197, "54"],
+ [1570796257, "54"],
+ [1570796317, "54"],
+ [1570796377, "54"],
+ [1570796437, "55"],
+ [1570796497, "54"],
+ [1570796557, "54"],
+ [1570796617, "54"],
+ [1570796677, "54"],
+ [1570796737, "54"],
+ [1570796797, "54"],
+ [1570796857, "54"],
+ [1570796917, "54"],
+ [1570796977, "54"]
+ ]
+ }
+ ]
+ }
+}
diff --git a/spec/fixtures/grafana/simplified_dashboard_response.json b/spec/fixtures/grafana/simplified_dashboard_response.json
new file mode 100644
index 00000000000..b450fda082b
--- /dev/null
+++ b/spec/fixtures/grafana/simplified_dashboard_response.json
@@ -0,0 +1,40 @@
+{
+ "dashboard": {
+ "panels": [
+ {
+ "datasource": "GitLab Omnibus",
+ "id": 8,
+ "lines": true,
+ "targets": [
+ {
+ "expr": "sum(\n rate(redis_net_input_bytes_total{instance=~\"$instance\"}[$__interval])\n)",
+ "format": "time_series",
+ "interval": "1m",
+ "legendFormat": "In",
+ "refId": "A"
+ },
+ {
+ "expr": "sum(\n rate(redis_net_output_bytes_total{instance=~\"[[instance]]\"}[$__interval])\n)",
+ "format": "time_series",
+ "interval": "1m",
+ "legendFormat": "Out",
+ "refId": "B"
+ }
+ ],
+ "title": "Network I/O",
+ "type": "graph",
+ "yaxes": [{ "format": "Bps" }, { "format": "short" }]
+ }
+ ],
+ "templating": {
+ "list": [
+ {
+ "current": {
+ "value": "localhost:9121"
+ },
+ "name": "instance"
+ }
+ ]
+ }
+ }
+}
diff --git a/spec/fixtures/group_export.tar.gz b/spec/fixtures/group_export.tar.gz
new file mode 100644
index 00000000000..83e360d7cc2
--- /dev/null
+++ b/spec/fixtures/group_export.tar.gz
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/import_export/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
index fbd752b7403..31805a54f2f 100644
--- a/spec/fixtures/lib/gitlab/import_export/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json
@@ -80,6 +80,17 @@
"issue_id": 40
}
],
+ "zoom_meetings": [
+ {
+ "id": 1,
+ "project_id": 5,
+ "issue_id": 40,
+ "url": "https://zoom.us/j/123456789",
+ "issue_status": 1,
+ "created_at": "2016-06-14T15:02:04.418Z",
+ "updated_at": "2016-06-14T15:02:04.418Z"
+ }
+ ],
"milestone": {
"id": 1,
"title": "test milestone",
@@ -2249,7 +2260,41 @@
]
}
],
- "snippets": [],
+ "snippets": [
+ {
+ "id": 1,
+ "title": "Test snippet title",
+ "content": "x = 1",
+ "author_id": 1,
+ "project_id": 1,
+ "created_at": "2019-11-05T15:06:06.579Z",
+ "updated_at": "2019-11-05T15:06:06.579Z",
+ "file_name": "",
+ "visibility_level": 20,
+ "description": "Test snippet description",
+ "award_emoji": [
+ {
+ "id": 1,
+ "name": "thumbsup",
+ "user_id": 1,
+ "awardable_type": "Snippet",
+ "awardable_id": 1,
+ "created_at": "2019-11-05T15:37:21.287Z",
+ "updated_at": "2019-11-05T15:37:21.287Z"
+ },
+ {
+ "id": 2,
+ "name": "coffee",
+ "user_id": 1,
+ "awardable_type": "Snippet",
+ "awardable_id": 1,
+ "created_at": "2019-11-05T15:37:24.645Z",
+ "updated_at": "2019-11-05T15:37:24.645Z"
+ }
+ ],
+ "notes": []
+ }
+ ],
"releases": [],
"project_members": [
{
@@ -6669,6 +6714,25 @@
]
}
]
+ },
+ {
+ "id": 41,
+ "project_id": 5,
+ "ref": "master",
+ "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
+ "before_sha": null,
+ "push_data": null,
+ "created_at": "2016-03-22T15:20:35.763Z",
+ "updated_at": "2016-03-22T15:20:35.763Z",
+ "tag": null,
+ "yaml_errors": null,
+ "committed_at": null,
+ "status": "failed",
+ "started_at": null,
+ "finished_at": null,
+ "duration": null,
+ "stages": [
+ ]
}
],
"triggers": [
diff --git a/spec/fixtures/lib/gitlab/import_export/project.group.json b/spec/fixtures/lib/gitlab/import_export/group/project.json
index 47faf271cca..47faf271cca 100644
--- a/spec/fixtures/lib/gitlab/import_export/project.group.json
+++ b/spec/fixtures/lib/gitlab/import_export/group/project.json
diff --git a/spec/fixtures/lib/gitlab/import_export/project.light.json b/spec/fixtures/lib/gitlab/import_export/light/project.json
index 2971ca0f0f8..2971ca0f0f8 100644
--- a/spec/fixtures/lib/gitlab/import_export/project.light.json
+++ b/spec/fixtures/lib/gitlab/import_export/light/project.json
diff --git a/spec/fixtures/lib/gitlab/import_export/project.milestone-iid.json b/spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json
index b028147b5eb..b028147b5eb 100644
--- a/spec/fixtures/lib/gitlab/import_export/project.milestone-iid.json
+++ b/spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
index 9c1be32645a..ac40f2dcd13 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
@@ -1,7 +1,6 @@
{
"type": "object",
"required": [
- "unit",
"label",
"prometheus_endpoint_path"
],
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
index 1548daacd64..a16f1ef592f 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
@@ -3,7 +3,6 @@
"required": [
"title",
"y_label",
- "weight",
"metrics"
],
"properties": {
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 62ba0d36982..cef50bf553c 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -467,6 +467,26 @@ describe('Api', () => {
});
});
+ describe('user projects', () => {
+ it('fetches all projects that belong to a particular user', done => {
+ const query = 'dummy query';
+ const options = { unused: 'option' };
+ const userId = '123456';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users/${userId}/projects`;
+ mock.onGet(expectedUrl).reply(200, [
+ {
+ name: 'test',
+ },
+ ]);
+
+ Api.userProjects(userId, query, options, response => {
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
+ done();
+ });
+ });
+ });
+
describe('commitPipelines', () => {
it('fetches pipelines for a given commit', done => {
const projectId = 'example/foobar';
diff --git a/spec/frontend/boards/components/issue_time_estimate_spec.js b/spec/frontend/boards/components/issue_time_estimate_spec.js
new file mode 100644
index 00000000000..0a16dfbc009
--- /dev/null
+++ b/spec/frontend/boards/components/issue_time_estimate_spec.js
@@ -0,0 +1,81 @@
+import IssueTimeEstimate from '~/boards/components/issue_time_estimate.vue';
+import boardsStore from '~/boards/stores/boards_store';
+import { shallowMount } from '@vue/test-utils';
+
+describe('Issue Time Estimate component', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ boardsStore.create();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when limitToHours is false', () => {
+ beforeEach(() => {
+ boardsStore.timeTracking.limitToHours = false;
+ wrapper = shallowMount(IssueTimeEstimate, {
+ propsData: {
+ estimate: 374460,
+ },
+ sync: false,
+ });
+ });
+
+ it('renders the correct time estimate', () => {
+ expect(
+ wrapper
+ .find('time')
+ .text()
+ .trim(),
+ ).toEqual('2w 3d 1m');
+ });
+
+ it('renders expanded time estimate in tooltip', () => {
+ expect(wrapper.find('.js-issue-time-estimate').text()).toContain('2 weeks 3 days 1 minute');
+ });
+
+ it('prevents tooltip xss', done => {
+ const alertSpy = jest.spyOn(window, 'alert');
+ wrapper.setProps({ estimate: 'Foo <script>alert("XSS")</script>' });
+ wrapper.vm.$nextTick(() => {
+ expect(alertSpy).not.toHaveBeenCalled();
+ expect(
+ wrapper
+ .find('time')
+ .text()
+ .trim(),
+ ).toEqual('0m');
+ expect(wrapper.find('.js-issue-time-estimate').text()).toContain('0m');
+ done();
+ });
+ });
+ });
+
+ describe('when limitToHours is true', () => {
+ beforeEach(() => {
+ boardsStore.timeTracking.limitToHours = true;
+ wrapper = shallowMount(IssueTimeEstimate, {
+ propsData: {
+ estimate: 374460,
+ },
+ sync: false,
+ });
+ });
+
+ it('renders the correct time estimate', () => {
+ expect(
+ wrapper
+ .find('time')
+ .text()
+ .trim(),
+ ).toEqual('104h 1m');
+ });
+
+ it('renders expanded time estimate in tooltip', () => {
+ expect(wrapper.find('.js-issue-time-estimate').text()).toContain('104 hours 1 minute');
+ });
+ });
+});
diff --git a/spec/frontend/boards/issue_card_spec.js b/spec/frontend/boards/issue_card_spec.js
new file mode 100644
index 00000000000..ebe97769ab7
--- /dev/null
+++ b/spec/frontend/boards/issue_card_spec.js
@@ -0,0 +1,307 @@
+/* global ListAssignee, ListLabel, ListIssue */
+import { mount } from '@vue/test-utils';
+import _ from 'underscore';
+import '~/boards/models/label';
+import '~/boards/models/assignee';
+import '~/boards/models/issue';
+import '~/boards/models/list';
+import IssueCardInner from '~/boards/components/issue_card_inner.vue';
+import { listObj } from '../../javascripts/boards/mock_data';
+import store from '~/boards/stores';
+
+describe('Issue card component', () => {
+ const user = new ListAssignee({
+ id: 1,
+ name: 'testing 123',
+ username: 'test',
+ avatar: 'test_image',
+ });
+
+ const label1 = new ListLabel({
+ id: 3,
+ title: 'testing 123',
+ color: 'blue',
+ text_color: 'white',
+ description: 'test',
+ });
+
+ let wrapper;
+ let issue;
+ let list;
+
+ beforeEach(() => {
+ list = { ...listObj, type: 'label' };
+ issue = new ListIssue({
+ title: 'Testing',
+ id: 1,
+ iid: 1,
+ confidential: false,
+ labels: [list.label],
+ assignees: [],
+ reference_path: '#1',
+ real_path: '/test/1',
+ weight: 1,
+ });
+ wrapper = mount(IssueCardInner, {
+ propsData: {
+ list,
+ issue,
+ issueLinkBase: '/test',
+ rootPath: '/',
+ },
+ store,
+ sync: false,
+ });
+ });
+
+ it('renders issue title', () => {
+ expect(wrapper.find('.board-card-title').text()).toContain(issue.title);
+ });
+
+ it('includes issue base in link', () => {
+ expect(wrapper.find('.board-card-title a').attributes('href')).toContain('/test');
+ });
+
+ it('includes issue title on link', () => {
+ expect(wrapper.find('.board-card-title a').attributes('title')).toBe(issue.title);
+ });
+
+ it('does not render confidential icon', () => {
+ expect(wrapper.find('.fa-eye-flash').exists()).toBe(false);
+ });
+
+ it('renders confidential icon', done => {
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ confidential: true,
+ },
+ });
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.confidential-icon').exists()).toBe(true);
+ done();
+ });
+ });
+
+ it('renders issue ID with #', () => {
+ expect(wrapper.find('.board-card-number').text()).toContain(`#${issue.id}`);
+ });
+
+ describe('assignee', () => {
+ it('does not render assignee', () => {
+ expect(wrapper.find('.board-card-assignee .avatar').exists()).toBe(false);
+ });
+
+ describe('exists', () => {
+ beforeEach(done => {
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ assignees: [user],
+ },
+ });
+
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('renders assignee', () => {
+ expect(wrapper.find('.board-card-assignee .avatar').exists()).toBe(true);
+ });
+
+ it('sets title', () => {
+ expect(wrapper.find('.js-assignee-tooltip').text()).toContain(`${user.name}`);
+ });
+
+ it('sets users path', () => {
+ expect(wrapper.find('.board-card-assignee a').attributes('href')).toBe('/test');
+ });
+
+ it('renders avatar', () => {
+ expect(wrapper.find('.board-card-assignee img').exists()).toBe(true);
+ });
+ });
+
+ describe('assignee default avatar', () => {
+ beforeEach(done => {
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ assignees: [
+ new ListAssignee(
+ {
+ id: 1,
+ name: 'testing 123',
+ username: 'test',
+ },
+ 'default_avatar',
+ ),
+ ],
+ },
+ });
+
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('displays defaults avatar if users avatar is null', () => {
+ expect(wrapper.find('.board-card-assignee img').exists()).toBe(true);
+ expect(wrapper.find('.board-card-assignee img').attributes('src')).toBe(
+ 'default_avatar?width=24',
+ );
+ });
+ });
+ });
+
+ describe('multiple assignees', () => {
+ beforeEach(done => {
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ assignees: [
+ new ListAssignee({
+ id: 2,
+ name: 'user2',
+ username: 'user2',
+ avatar: 'test_image',
+ }),
+ new ListAssignee({
+ id: 3,
+ name: 'user3',
+ username: 'user3',
+ avatar: 'test_image',
+ }),
+ new ListAssignee({
+ id: 4,
+ name: 'user4',
+ username: 'user4',
+ avatar: 'test_image',
+ }),
+ ],
+ },
+ });
+
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('renders all three assignees', () => {
+ expect(wrapper.findAll('.board-card-assignee .avatar').length).toEqual(3);
+ });
+
+ describe('more than three assignees', () => {
+ beforeEach(done => {
+ const { assignees } = wrapper.props('issue');
+ assignees.push(
+ new ListAssignee({
+ id: 5,
+ name: 'user5',
+ username: 'user5',
+ avatar: 'test_image',
+ }),
+ );
+
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ assignees,
+ },
+ });
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('renders more avatar counter', () => {
+ expect(
+ wrapper
+ .find('.board-card-assignee .avatar-counter')
+ .text()
+ .trim(),
+ ).toEqual('+2');
+ });
+
+ it('renders two assignees', () => {
+ expect(wrapper.findAll('.board-card-assignee .avatar').length).toEqual(2);
+ });
+
+ it('renders 99+ avatar counter', done => {
+ const assignees = [
+ ...wrapper.props('issue').assignees,
+ ..._.range(5, 103).map(
+ i =>
+ new ListAssignee({
+ id: i,
+ name: 'name',
+ username: 'username',
+ avatar: 'test_image',
+ }),
+ ),
+ ];
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ assignees,
+ },
+ });
+
+ wrapper.vm.$nextTick(() => {
+ expect(
+ wrapper
+ .find('.board-card-assignee .avatar-counter')
+ .text()
+ .trim(),
+ ).toEqual('99+');
+ done();
+ });
+ });
+ });
+ });
+
+ describe('labels', () => {
+ beforeEach(done => {
+ issue.addLabel(label1);
+ wrapper.setProps({ issue: { ...issue } });
+
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('does not render list label but renders all other labels', () => {
+ expect(wrapper.findAll('.badge').length).toBe(1);
+ });
+
+ it('renders label', () => {
+ const nodes = wrapper
+ .findAll('.badge')
+ .wrappers.map(label => label.attributes('data-original-title'));
+
+ expect(nodes.includes(label1.description)).toBe(true);
+ });
+
+ it('sets label description as title', () => {
+ expect(wrapper.find('.badge').attributes('data-original-title')).toContain(
+ label1.description,
+ );
+ });
+
+ it('sets background color of button', () => {
+ const nodes = wrapper
+ .findAll('.badge')
+ .wrappers.map(label => label.element.style.backgroundColor);
+
+ expect(nodes.includes(label1.color)).toBe(true);
+ });
+
+ it('does not render label if label does not have an ID', done => {
+ issue.addLabel(
+ new ListLabel({
+ title: 'closed',
+ }),
+ );
+ wrapper.setProps({ issue: { ...issue } });
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.findAll('.badge').length).toBe(1);
+ expect(wrapper.text()).not.toContain('closed');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/frontend/boards/stores/getters_spec.js b/spec/frontend/boards/stores/getters_spec.js
new file mode 100644
index 00000000000..38b2333e679
--- /dev/null
+++ b/spec/frontend/boards/stores/getters_spec.js
@@ -0,0 +1,21 @@
+import getters from '~/boards/stores/getters';
+
+describe('Boards - Getters', () => {
+ describe('getLabelToggleState', () => {
+ it('should return "on" when isShowingLabels is true', () => {
+ const state = {
+ isShowingLabels: true,
+ };
+
+ expect(getters.getLabelToggleState(state)).toBe('on');
+ });
+
+ it('should return "off" when isShowingLabels is false', () => {
+ const state = {
+ isShowingLabels: false,
+ };
+
+ expect(getters.getLabelToggleState(state)).toBe('off');
+ });
+ });
+});
diff --git a/spec/frontend/clusters/clusters_bundle_spec.js b/spec/frontend/clusters/clusters_bundle_spec.js
index 517d8781600..199e11401a9 100644
--- a/spec/frontend/clusters/clusters_bundle_spec.js
+++ b/spec/frontend/clusters/clusters_bundle_spec.js
@@ -10,8 +10,10 @@ import axios from '~/lib/utils/axios_utils';
import { loadHTMLFixture } from 'helpers/fixtures';
import { setTestTimeout } from 'helpers/timeout';
import $ from 'jquery';
+import initProjectSelectDropdown from '~/project_select';
jest.mock('~/lib/utils/poll');
+jest.mock('~/project_select');
const { INSTALLING, INSTALLABLE, INSTALLED, UNINSTALLING } = APPLICATION_STATUS;
@@ -44,6 +46,7 @@ describe('Clusters', () => {
afterEach(() => {
cluster.destroy();
mock.restore();
+ jest.clearAllMocks();
});
describe('class constructor', () => {
@@ -55,6 +58,10 @@ describe('Clusters', () => {
it('should call initPolling on construct', () => {
expect(cluster.initPolling).toHaveBeenCalled();
});
+
+ it('should call initProjectSelectDropdown on construct', () => {
+ expect(initProjectSelectDropdown).toHaveBeenCalled();
+ });
});
describe('toggle', () => {
@@ -279,16 +286,21 @@ describe('Clusters', () => {
});
describe('installApplication', () => {
- it.each(APPLICATIONS)('tries to install %s', applicationId => {
- jest.spyOn(cluster.service, 'installApplication').mockResolvedValueOnce();
+ it.each(APPLICATIONS)('tries to install %s', (applicationId, done) => {
+ jest.spyOn(cluster.service, 'installApplication').mockResolvedValue();
cluster.store.state.applications[applicationId].status = INSTALLABLE;
- cluster.installApplication({ id: applicationId });
-
- expect(cluster.store.state.applications[applicationId].status).toEqual(INSTALLING);
- expect(cluster.store.state.applications[applicationId].requestReason).toEqual(null);
- expect(cluster.service.installApplication).toHaveBeenCalledWith(applicationId, undefined);
+ // eslint-disable-next-line promise/valid-params
+ cluster
+ .installApplication({ id: applicationId })
+ .then(() => {
+ expect(cluster.store.state.applications[applicationId].status).toEqual(INSTALLING);
+ expect(cluster.store.state.applications[applicationId].requestReason).toEqual(null);
+ expect(cluster.service.installApplication).toHaveBeenCalledWith(applicationId, undefined);
+ done();
+ })
+ .catch();
});
it('sets error request status when the request fails', () => {
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
index fbcab078993..49bda9539fd 100644
--- a/spec/frontend/clusters/components/applications_spec.js
+++ b/spec/frontend/clusters/components/applications_spec.js
@@ -6,6 +6,7 @@ import { APPLICATIONS_MOCK_STATE } from '../services/mock_data';
import eventHub from '~/clusters/event_hub';
import { shallowMount } from '@vue/test-utils';
import KnativeDomainEditor from '~/clusters/components/knative_domain_editor.vue';
+import CrossplaneProviderStack from '~/clusters/components/crossplane_provider_stack.vue';
describe('Applications', () => {
let vm;
@@ -13,6 +14,10 @@ describe('Applications', () => {
beforeEach(() => {
Applications = Vue.extend(applications);
+
+ gon.features = gon.features || {};
+ gon.features.enableClusterApplicationElasticStack = true;
+ gon.features.enableClusterApplicationCrossplane = true;
});
afterEach(() => {
@@ -39,6 +44,10 @@ describe('Applications', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-cert_manager')).not.toBeNull();
});
+ it('renders a row for Crossplane', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-crossplane')).not.toBeNull();
+ });
+
it('renders a row for Prometheus', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-prometheus')).not.toBeNull();
});
@@ -54,6 +63,10 @@ describe('Applications', () => {
it('renders a row for Knative', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-knative')).not.toBeNull();
});
+
+ it('renders a row for Elastic Stack', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-elastic_stack')).not.toBeNull();
+ });
});
describe('Group cluster applications', () => {
@@ -76,6 +89,10 @@ describe('Applications', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-cert_manager')).not.toBeNull();
});
+ it('renders a row for Crossplane', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-crossplane')).not.toBeNull();
+ });
+
it('renders a row for Prometheus', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-prometheus')).not.toBeNull();
});
@@ -91,6 +108,10 @@ describe('Applications', () => {
it('renders a row for Knative', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-knative')).not.toBeNull();
});
+
+ it('renders a row for Elastic Stack', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-elastic_stack')).not.toBeNull();
+ });
});
describe('Instance cluster applications', () => {
@@ -113,6 +134,10 @@ describe('Applications', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-cert_manager')).not.toBeNull();
});
+ it('renders a row for Crossplane', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-crossplane')).not.toBeNull();
+ });
+
it('renders a row for Prometheus', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-prometheus')).not.toBeNull();
});
@@ -128,6 +153,10 @@ describe('Applications', () => {
it('renders a row for Knative', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-knative')).not.toBeNull();
});
+
+ it('renders a row for Elastic Stack', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-elastic_stack')).not.toBeNull();
+ });
});
describe('Ingress application', () => {
@@ -164,10 +193,12 @@ describe('Applications', () => {
},
helm: { title: 'Helm Tiller' },
cert_manager: { title: 'Cert-Manager' },
+ crossplane: { title: 'Crossplane', stack: '' },
runner: { title: 'GitLab Runner' },
prometheus: { title: 'Prometheus' },
jupyter: { title: 'JupyterHub', hostname: '' },
knative: { title: 'Knative', hostname: '' },
+ elastic_stack: { title: 'Elastic Stack', kibana_hostname: '' },
},
});
@@ -260,7 +291,11 @@ describe('Applications', () => {
},
});
- expect(vm.$el.querySelector('.js-hostname').getAttribute('readonly')).toEqual(null);
+ expect(
+ vm.$el
+ .querySelector('.js-cluster-application-row-jupyter .js-hostname')
+ .getAttribute('readonly'),
+ ).toEqual(null);
});
});
@@ -273,7 +308,9 @@ describe('Applications', () => {
},
});
- expect(vm.$el.querySelector('.js-hostname')).toBe(null);
+ expect(vm.$el.querySelector('.js-cluster-application-row-jupyter .js-hostname')).toBe(
+ null,
+ );
});
});
@@ -287,7 +324,11 @@ describe('Applications', () => {
},
});
- expect(vm.$el.querySelector('.js-hostname').getAttribute('readonly')).toEqual('readonly');
+ expect(
+ vm.$el
+ .querySelector('.js-cluster-application-row-jupyter .js-hostname')
+ .getAttribute('readonly'),
+ ).toEqual('readonly');
});
});
@@ -299,7 +340,9 @@ describe('Applications', () => {
});
it('does not render input', () => {
- expect(vm.$el.querySelector('.js-hostname')).toBe(null);
+ expect(vm.$el.querySelector('.js-cluster-application-row-jupyter .js-hostname')).toBe(
+ null,
+ );
});
it('renders disabled install button', () => {
@@ -361,4 +404,110 @@ describe('Applications', () => {
});
});
});
+
+ describe('Crossplane application', () => {
+ const propsData = {
+ applications: {
+ ...APPLICATIONS_MOCK_STATE,
+ crossplane: {
+ title: 'Crossplane',
+ stack: {
+ code: '',
+ },
+ },
+ },
+ };
+
+ let wrapper;
+ beforeEach(() => {
+ wrapper = shallowMount(Applications, { propsData });
+ });
+ afterEach(() => {
+ wrapper.destroy();
+ });
+ it('renders the correct Component', () => {
+ const crossplane = wrapper.find(CrossplaneProviderStack);
+ expect(crossplane.exists()).toBe(true);
+ });
+ });
+
+ describe('Elastic Stack application', () => {
+ describe('with ingress installed with ip & elastic stack installable', () => {
+ it('renders hostname active input', () => {
+ vm = mountComponent(Applications, {
+ applications: {
+ ...APPLICATIONS_MOCK_STATE,
+ ingress: {
+ title: 'Ingress',
+ status: 'installed',
+ externalIp: '1.1.1.1',
+ },
+ },
+ });
+
+ expect(
+ vm.$el
+ .querySelector('.js-cluster-application-row-elastic_stack .js-hostname')
+ .getAttribute('readonly'),
+ ).toEqual(null);
+ });
+ });
+
+ describe('with ingress installed without external ip', () => {
+ it('does not render hostname input', () => {
+ vm = mountComponent(Applications, {
+ applications: {
+ ...APPLICATIONS_MOCK_STATE,
+ ingress: { title: 'Ingress', status: 'installed' },
+ },
+ });
+
+ expect(vm.$el.querySelector('.js-cluster-application-row-elastic_stack .js-hostname')).toBe(
+ null,
+ );
+ });
+ });
+
+ describe('with ingress & elastic stack installed', () => {
+ it('renders readonly input', () => {
+ vm = mountComponent(Applications, {
+ applications: {
+ ...APPLICATIONS_MOCK_STATE,
+ ingress: { title: 'Ingress', status: 'installed', externalIp: '1.1.1.1' },
+ elastic_stack: { title: 'Elastic Stack', status: 'installed', kibana_hostname: '' },
+ },
+ });
+
+ expect(
+ vm.$el
+ .querySelector('.js-cluster-application-row-elastic_stack .js-hostname')
+ .getAttribute('readonly'),
+ ).toEqual('readonly');
+ });
+ });
+
+ describe('without ingress installed', () => {
+ beforeEach(() => {
+ vm = mountComponent(Applications, {
+ applications: APPLICATIONS_MOCK_STATE,
+ });
+ });
+
+ it('does not render input', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-elastic_stack .js-hostname')).toBe(
+ null,
+ );
+ });
+
+ it('renders disabled install button', () => {
+ expect(
+ vm.$el
+ .querySelector(
+ '.js-cluster-application-row-elastic_stack .js-cluster-application-install-button',
+ )
+ .getAttribute('disabled'),
+ ).toEqual('disabled');
+ });
+ });
+ });
});
diff --git a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
new file mode 100644
index 00000000000..0d234822d7b
--- /dev/null
+++ b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
@@ -0,0 +1,78 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlDropdownItem } from '@gitlab/ui';
+import CrossplaneProviderStack from '~/clusters/components/crossplane_provider_stack.vue';
+
+describe('CrossplaneProviderStack component', () => {
+ let wrapper;
+
+ const defaultProps = {
+ stacks: [
+ {
+ name: 'Google Cloud Platform',
+ code: 'gcp',
+ },
+ {
+ name: 'Amazon Web Services',
+ code: 'aws',
+ },
+ ],
+ };
+
+ function createComponent(props = {}) {
+ const propsData = {
+ ...defaultProps,
+ ...props,
+ };
+
+ wrapper = shallowMount(CrossplaneProviderStack, {
+ propsData,
+ });
+ }
+
+ beforeEach(() => {
+ const crossplane = {
+ title: 'crossplane',
+ stack: '',
+ };
+ createComponent({ crossplane });
+ });
+
+ const findDropdownElements = () => wrapper.findAll(GlDropdownItem);
+ const findFirstDropdownElement = () => findDropdownElements().at(0);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders all of the available stacks in the dropdown', () => {
+ const dropdownElements = findDropdownElements();
+
+ expect(dropdownElements.length).toBe(defaultProps.stacks.length);
+
+ defaultProps.stacks.forEach((stack, index) =>
+ expect(dropdownElements.at(index).text()).toEqual(stack.name),
+ );
+ });
+
+ it('displays the correct label for the first dropdown item if a stack is selected', () => {
+ const crossplane = {
+ title: 'crossplane',
+ stack: 'gcp',
+ };
+ createComponent({ crossplane });
+ expect(wrapper.vm.dropdownText).toBe('Google Cloud Platform');
+ });
+
+ it('emits the "set" event with the selected stack value', () => {
+ const crossplane = {
+ title: 'crossplane',
+ stack: 'gcp',
+ };
+ createComponent({ crossplane });
+ findFirstDropdownElement().vm.$emit('click');
+ expect(wrapper.emitted().set[0][0].code).toEqual('gcp');
+ });
+  it('renders the correct dropdown text when no stack is selected', () => {
+ expect(wrapper.vm.dropdownText).toBe('Select Stack');
+ });
+});
diff --git a/spec/frontend/clusters/services/mock_data.js b/spec/frontend/clusters/services/mock_data.js
index 41ad398e924..016f5a259b5 100644
--- a/spec/frontend/clusters/services/mock_data.js
+++ b/spec/frontend/clusters/services/mock_data.js
@@ -52,6 +52,18 @@ const CLUSTERS_MOCK_DATA = {
email: 'test@example.com',
can_uninstall: false,
},
+ {
+ name: 'crossplane',
+ status: APPLICATION_STATUS.ERROR,
+ status_reason: 'Cannot connect',
+ can_uninstall: false,
+ },
+ {
+ name: 'elastic_stack',
+ status: APPLICATION_STATUS.ERROR,
+ status_reason: 'Cannot connect',
+ can_uninstall: false,
+ },
],
},
},
@@ -98,6 +110,17 @@ const CLUSTERS_MOCK_DATA = {
status_reason: 'Cannot connect',
email: 'test@example.com',
},
+ {
+ name: 'crossplane',
+ status: APPLICATION_STATUS.ERROR,
+ status_reason: 'Cannot connect',
+ stack: 'gcp',
+ },
+ {
+ name: 'elastic_stack',
+ status: APPLICATION_STATUS.ERROR,
+ status_reason: 'Cannot connect',
+ },
],
},
},
@@ -105,11 +128,13 @@ const CLUSTERS_MOCK_DATA = {
POST: {
'/gitlab-org/gitlab-shell/clusters/1/applications/helm': {},
'/gitlab-org/gitlab-shell/clusters/1/applications/ingress': {},
+ '/gitlab-org/gitlab-shell/clusters/1/applications/crossplane': {},
'/gitlab-org/gitlab-shell/clusters/1/applications/cert_manager': {},
'/gitlab-org/gitlab-shell/clusters/1/applications/runner': {},
'/gitlab-org/gitlab-shell/clusters/1/applications/prometheus': {},
'/gitlab-org/gitlab-shell/clusters/1/applications/jupyter': {},
'/gitlab-org/gitlab-shell/clusters/1/applications/knative': {},
+ '/gitlab-org/gitlab-shell/clusters/1/applications/elastic_stack': {},
},
};
@@ -126,11 +151,13 @@ const DEFAULT_APPLICATION_STATE = {
const APPLICATIONS_MOCK_STATE = {
helm: { title: 'Helm Tiller', status: 'installable' },
ingress: { title: 'Ingress', status: 'installable' },
+ crossplane: { title: 'Crossplane', status: 'installable', stack: '' },
cert_manager: { title: 'Cert-Manager', status: 'installable' },
runner: { title: 'GitLab Runner' },
prometheus: { title: 'Prometheus' },
jupyter: { title: 'JupyterHub', status: 'installable', hostname: '' },
knative: { title: 'Knative ', status: 'installable', hostname: '' },
+ elastic_stack: { title: 'Elastic Stack', status: 'installable', kibana_hostname: '' },
};
export { CLUSTERS_MOCK_DATA, DEFAULT_APPLICATION_STATE, APPLICATIONS_MOCK_STATE };
diff --git a/spec/frontend/clusters/stores/clusters_store_spec.js b/spec/frontend/clusters/stores/clusters_store_spec.js
index 5ee06eb44c9..71d4daceb75 100644
--- a/spec/frontend/clusters/stores/clusters_store_spec.js
+++ b/spec/frontend/clusters/stores/clusters_store_spec.js
@@ -71,6 +71,7 @@ describe('Clusters Store', () => {
uninstallable: false,
uninstallSuccessful: false,
uninstallFailed: false,
+ validationError: null,
},
ingress: {
title: 'Ingress',
@@ -84,6 +85,7 @@ describe('Clusters Store', () => {
uninstallable: false,
uninstallSuccessful: false,
uninstallFailed: false,
+ validationError: null,
},
runner: {
title: 'GitLab Runner',
@@ -100,6 +102,7 @@ describe('Clusters Store', () => {
uninstallable: false,
uninstallSuccessful: false,
uninstallFailed: false,
+ validationError: null,
},
prometheus: {
title: 'Prometheus',
@@ -111,6 +114,7 @@ describe('Clusters Store', () => {
uninstallable: false,
uninstallSuccessful: false,
uninstallFailed: false,
+ validationError: null,
},
jupyter: {
title: 'JupyterHub',
@@ -123,6 +127,7 @@ describe('Clusters Store', () => {
uninstallable: false,
uninstallSuccessful: false,
uninstallFailed: false,
+ validationError: null,
},
knative: {
title: 'Knative',
@@ -140,6 +145,7 @@ describe('Clusters Store', () => {
uninstallFailed: false,
updateSuccessful: false,
updateFailed: false,
+ validationError: null,
},
cert_manager: {
title: 'Cert-Manager',
@@ -152,6 +158,32 @@ describe('Clusters Store', () => {
uninstallable: false,
uninstallSuccessful: false,
uninstallFailed: false,
+ validationError: null,
+ },
+ elastic_stack: {
+ title: 'Elastic Stack',
+ status: APPLICATION_STATUS.INSTALLABLE,
+ installFailed: true,
+ statusReason: mockResponseData.applications[7].status_reason,
+ requestReason: null,
+ kibana_hostname: '',
+ installed: false,
+ uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
+ validationError: null,
+ },
+ crossplane: {
+ title: 'Crossplane',
+ status: APPLICATION_STATUS.INSTALLABLE,
+ installFailed: true,
+ statusReason: mockResponseData.applications[8].status_reason,
+ requestReason: null,
+ installed: false,
+ uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
+ validationError: null,
},
},
environments: [],
@@ -183,5 +215,16 @@ describe('Clusters Store', () => {
`jupyter.${store.state.applications.ingress.externalIp}.nip.io`,
);
});
+
+ it('sets default hostname for elastic stack when ingress has a ip address', () => {
+ const mockResponseData =
+ CLUSTERS_MOCK_DATA.GET['/gitlab-org/gitlab-shell/clusters/2/status.json'].data;
+
+ store.updateStateFromServer(mockResponseData);
+
+ expect(store.state.applications.elastic_stack.kibana_hostname).toEqual(
+ `kibana.${store.state.applications.ingress.externalIp}.nip.io`,
+ );
+ });
});
});
diff --git a/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap b/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
index 47bdc677068..3c603c7f573 100644
--- a/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
+++ b/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
@@ -26,7 +26,7 @@ exports[`Confidential merge request project form group component renders empty s
>
fork the project
</a>
- and set the forks visiblity to private.
+ and set the forks visibility to private.
</span>
<gllink-stub
@@ -76,7 +76,7 @@ exports[`Confidential merge request project form group component renders fork dr
>
fork the project
</a>
- and set the forks visiblity to private.
+ and set the forks visibility to private.
</span>
<gllink-stub
diff --git a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
new file mode 100644
index 00000000000..b87afdd7eb4
--- /dev/null
+++ b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
@@ -0,0 +1,47 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Contributors charts should render charts when loading completed and there is chart data 1`] = `
+<div>
+ <div
+ class="contributors-charts"
+ >
+ <h4>
+ Commits to master
+ </h4>
+
+ <span>
+ Excluding merge commits. Limited to 6,000 commits.
+ </span>
+
+ <div>
+ <glareachart-stub
+ data="[object Object]"
+ height="264"
+ option="[object Object]"
+ />
+ </div>
+
+ <div
+ class="row"
+ >
+ <div
+ class="col-6"
+ >
+ <h4>
+ John
+ </h4>
+
+ <p>
+ 2 commits (jawnnypoo@gmail.com)
+ </p>
+
+ <glareachart-stub
+ data="[object Object]"
+ height="216"
+ option="[object Object]"
+ />
+ </div>
+ </div>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/contributors/component/contributors_spec.js b/spec/frontend/contributors/component/contributors_spec.js
new file mode 100644
index 00000000000..fdba09ed26c
--- /dev/null
+++ b/spec/frontend/contributors/component/contributors_spec.js
@@ -0,0 +1,69 @@
+import Vue from 'vue';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { createStore } from '~/contributors/stores';
+import axios from '~/lib/utils/axios_utils';
+import MockAdapter from 'axios-mock-adapter';
+import ContributorsCharts from '~/contributors/components/contributors.vue';
+
+const localVue = createLocalVue();
+let wrapper;
+let mock;
+let store;
+const Component = Vue.extend(ContributorsCharts);
+const endpoint = 'contributors';
+const branch = 'master';
+const chartData = [
+ { author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-05-05' },
+ { author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-03-03' },
+];
+
+function factory() {
+ mock = new MockAdapter(axios);
+ jest.spyOn(axios, 'get');
+ mock.onGet().reply(200, chartData);
+ store = createStore();
+
+ wrapper = shallowMount(Component, {
+ propsData: {
+ endpoint,
+ branch,
+ },
+ stubs: {
+ GlLoadingIcon: true,
+ GlAreaChart: true,
+ },
+ store,
+ });
+}
+
+describe('Contributors charts', () => {
+ beforeEach(() => {
+ factory();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ it('should fetch chart data when mounted', () => {
+ expect(axios.get).toHaveBeenCalledWith(endpoint);
+ });
+
+  it('should display loader while loading data', () => {
+ wrapper.vm.$store.state.loading = true;
+ return localVue.nextTick(() => {
+ expect(wrapper.find('.contributors-loader').exists()).toBe(true);
+ });
+ });
+
+ it('should render charts when loading completed and there is chart data', () => {
+ wrapper.vm.$store.state.loading = false;
+ wrapper.vm.$store.state.chartData = chartData;
+ return localVue.nextTick(() => {
+ expect(wrapper.find('.contributors-loader').exists()).toBe(false);
+ expect(wrapper.find('.contributors-charts').exists()).toBe(true);
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+});
diff --git a/spec/frontend/contributors/store/actions_spec.js b/spec/frontend/contributors/store/actions_spec.js
new file mode 100644
index 00000000000..bb017e0ac0f
--- /dev/null
+++ b/spec/frontend/contributors/store/actions_spec.js
@@ -0,0 +1,60 @@
+import axios from '~/lib/utils/axios_utils';
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import flashError from '~/flash';
+import * as actions from '~/contributors/stores/actions';
+import * as types from '~/contributors/stores/mutation_types';
+
+jest.mock('~/flash.js');
+
+describe('Contributors store actions', () => {
+ describe('fetchChartData', () => {
+ let mock;
+ const endpoint = '/contributors';
+ const chartData = { '2017-11': 0, '2017-12': 2 };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ it('should commit SET_CHART_DATA with received response', done => {
+ mock.onGet().reply(200, chartData);
+
+ testAction(
+ actions.fetchChartData,
+ { endpoint },
+ {},
+ [
+ { type: types.SET_LOADING_STATE, payload: true },
+ { type: types.SET_CHART_DATA, payload: chartData },
+ { type: types.SET_LOADING_STATE, payload: false },
+ ],
+ [],
+ () => {
+ mock.restore();
+ done();
+ },
+ );
+ });
+
+ it('should show flash on API error', done => {
+ mock.onGet().reply(400, 'Not Found');
+
+ testAction(
+ actions.fetchChartData,
+ { endpoint },
+ {},
+ [{ type: types.SET_LOADING_STATE, payload: true }],
+ [],
+ () => {
+ expect(flashError).toHaveBeenCalledWith(expect.stringMatching('error'));
+ mock.restore();
+ done();
+ },
+ );
+ });
+ });
+});
+
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
diff --git a/spec/frontend/contributors/store/getters_spec.js b/spec/frontend/contributors/store/getters_spec.js
new file mode 100644
index 00000000000..62ae9b36f87
--- /dev/null
+++ b/spec/frontend/contributors/store/getters_spec.js
@@ -0,0 +1,73 @@
+import * as getters from '~/contributors/stores/getters';
+
+describe('Contributors Store Getters', () => {
+ const state = {};
+
+ describe('showChart', () => {
+ it('should NOT show chart if loading', () => {
+ state.loading = true;
+
+ expect(getters.showChart(state)).toEqual(false);
+ });
+
+    it('should NOT show chart when there is no data', () => {
+ state.loading = false;
+ state.chartData = null;
+
+ expect(getters.showChart(state)).toEqual(false);
+ });
+
+    it('should show the chart in case loading completed and there is data', () => {
+ state.loading = false;
+ state.chartData = true;
+
+ expect(getters.showChart(state)).toEqual(true);
+ });
+
+ describe('parsedData', () => {
+ let parsed;
+
+ beforeAll(() => {
+ state.chartData = [
+ { author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-05-05' },
+ { author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-05-05' },
+ { author_name: 'Carlson', author_email: 'jawnnypoo@gmail.com', date: '2019-03-03' },
+ { author_name: 'Carlson', author_email: 'jawnnypoo@gmail.com', date: '2019-05-05' },
+ { author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-04-04' },
+ { author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-04-04' },
+ { author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-03-03' },
+ ];
+ parsed = getters.parsedData(state);
+ });
+
+ it('should group contributions by date ', () => {
+ expect(parsed.total).toMatchObject({ '2019-05-05': 3, '2019-03-03': 2, '2019-04-04': 2 });
+ });
+
+ it('should group contributions by author ', () => {
+ expect(parsed.byAuthor).toMatchObject({
+ Carlson: {
+ email: 'jawnnypoo@gmail.com',
+ commits: 2,
+ dates: {
+ '2019-03-03': 1,
+ '2019-05-05': 1,
+ },
+ },
+ John: {
+ email: 'jawnnypoo@gmail.com',
+ commits: 5,
+ dates: {
+ '2019-03-03': 1,
+ '2019-04-04': 2,
+ '2019-05-05': 2,
+ },
+ },
+ });
+ });
+ });
+ });
+});
+
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
diff --git a/spec/frontend/contributors/store/mutations_spec.js b/spec/frontend/contributors/store/mutations_spec.js
new file mode 100644
index 00000000000..e9e756d4a65
--- /dev/null
+++ b/spec/frontend/contributors/store/mutations_spec.js
@@ -0,0 +1,40 @@
+import state from '~/contributors/stores/state';
+import mutations from '~/contributors/stores/mutations';
+import * as types from '~/contributors/stores/mutation_types';
+
+describe('Contributors mutations', () => {
+ let stateCopy;
+
+ beforeEach(() => {
+ stateCopy = state();
+ });
+
+ describe('SET_LOADING_STATE', () => {
+ it('should set loading flag', () => {
+ const loading = true;
+ mutations[types.SET_LOADING_STATE](stateCopy, loading);
+
+ expect(stateCopy.loading).toEqual(loading);
+ });
+ });
+
+ describe('SET_CHART_DATA', () => {
+ const chartData = { '2017-11': 0, '2017-12': 2 };
+
+ it('should set chart data', () => {
+ mutations[types.SET_CHART_DATA](stateCopy, chartData);
+
+ expect(stateCopy.chartData).toEqual(chartData);
+ });
+ });
+
+ describe('SET_ACTIVE_BRANCH', () => {
+ it('should set search query', () => {
+ const branch = 'feature-branch';
+
+ mutations[types.SET_ACTIVE_BRANCH](stateCopy, branch);
+
+ expect(stateCopy.branch).toEqual(branch);
+ });
+ });
+});
diff --git a/spec/frontend/contributors/utils_spec.js b/spec/frontend/contributors/utils_spec.js
new file mode 100644
index 00000000000..a2b9154329b
--- /dev/null
+++ b/spec/frontend/contributors/utils_spec.js
@@ -0,0 +1,21 @@
+import * as utils from '~/contributors/utils';
+
+describe('Contributors Util Functions', () => {
+ describe('xAxisLabelFormatter', () => {
+ it('should return year if the date is in January', () => {
+ expect(utils.xAxisLabelFormatter(new Date('01-12-2019'))).toEqual('2019');
+ });
+
+ it('should return month name otherwise', () => {
+ expect(utils.xAxisLabelFormatter(new Date('12-02-2019'))).toEqual('Dec');
+ expect(utils.xAxisLabelFormatter(new Date('07-12-2019'))).toEqual('Jul');
+ });
+ });
+
+ describe('dateFormatter', () => {
+ it('should format provided date to YYYY-MM-DD format', () => {
+ expect(utils.dateFormatter(new Date('December 17, 1995 03:24:00'))).toEqual('1995-12-17');
+ expect(utils.dateFormatter(new Date(1565308800000))).toEqual('2019-08-09');
+ });
+ });
+});
diff --git a/spec/frontend/create_cluster/eks_cluster/components/cluster_form_dropdown_spec.js b/spec/frontend/create_cluster/eks_cluster/components/cluster_form_dropdown_spec.js
index 366c2fc7b26..efbe2635fcc 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/cluster_form_dropdown_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/cluster_form_dropdown_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import ClusterFormDropdown from '~/create_cluster/eks_cluster/components/cluster_form_dropdown.vue';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
import DropdownSearchInput from '~/vue_shared/components/dropdown/dropdown_search_input.vue';
-import DropdownHiddenInput from '~/vue_shared/components/dropdown/dropdown_hidden_input.vue';
+import { GlIcon } from '@gitlab/ui';
describe('ClusterFormDropdown', () => {
let vm;
@@ -41,24 +41,50 @@ describe('ClusterFormDropdown', () => {
.trigger('click');
});
- it('displays selected item label', () => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
+ it('emits input event with selected item', () => {
+ expect(vm.emitted('input')[0]).toEqual([secondItem.value]);
+ });
+ });
+
+ describe('when multiple items are selected', () => {
+ const value = [1];
+
+ beforeEach(() => {
+ vm.setProps({ items, multiple: true, value });
+ vm.findAll('.js-dropdown-item')
+ .at(0)
+ .trigger('click');
+ vm.findAll('.js-dropdown-item')
+ .at(1)
+ .trigger('click');
+ });
+
+ it('emits input event with an array of selected items', () => {
+ expect(vm.emitted('input')[1]).toEqual([[firstItem.value, secondItem.value]]);
+ });
+ });
+
+ describe('when multiple items can be selected', () => {
+ beforeEach(() => {
+ vm.setProps({ items, multiple: true, value: firstItem.value });
});
- it('sets selected value to dropdown hidden input', () => {
- expect(vm.find(DropdownHiddenInput).props('value')).toEqual(secondItem.value);
+ it('displays a checked GlIcon next to the item', () => {
+ expect(vm.find(GlIcon).is('.invisible')).toBe(false);
+ expect(vm.find(GlIcon).props('name')).toBe('mobile-issue-close');
});
});
describe('when an item is selected and has a custom label property', () => {
it('displays selected item custom label', () => {
const labelProperty = 'customLabel';
- const selectedItem = { [labelProperty]: 'Name' };
+ const label = 'Name';
+ const currentValue = 1;
+ const customLabelItems = [{ [labelProperty]: label, value: currentValue }];
- vm.setProps({ labelProperty });
- vm.setData({ selectedItem });
+ vm.setProps({ labelProperty, items: customLabelItems, value: currentValue });
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(selectedItem[labelProperty]);
+ expect(vm.find(DropdownButton).props('toggleText')).toEqual(label);
});
});
diff --git a/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js b/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js
new file mode 100644
index 00000000000..4bf3ac430f5
--- /dev/null
+++ b/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js
@@ -0,0 +1,91 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+
+import CreateEksCluster from '~/create_cluster/eks_cluster/components/create_eks_cluster.vue';
+import EksClusterConfigurationForm from '~/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue';
+import ServiceCredentialsForm from '~/create_cluster/eks_cluster/components/service_credentials_form.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('CreateEksCluster', () => {
+ let vm;
+ let state;
+ const gitlabManagedClusterHelpPath = 'gitlab-managed-cluster-help-path';
+ const accountAndExternalIdsHelpPath = 'account-and-external-id-help-path';
+ const createRoleArnHelpPath = 'role-arn-help-path';
+ const kubernetesIntegrationHelpPath = 'kubernetes-integration';
+ const externalLinkIcon = 'external-link';
+
+ beforeEach(() => {
+ state = { hasCredentials: false };
+ const store = new Vuex.Store({
+ state,
+ });
+
+ vm = shallowMount(CreateEksCluster, {
+ propsData: {
+ gitlabManagedClusterHelpPath,
+ accountAndExternalIdsHelpPath,
+ createRoleArnHelpPath,
+ externalLinkIcon,
+ kubernetesIntegrationHelpPath,
+ },
+ localVue,
+ store,
+ });
+ });
+ afterEach(() => vm.destroy());
+
+ describe('when credentials are provided', () => {
+ beforeEach(() => {
+ state.hasCredentials = true;
+ });
+
+ it('displays eks cluster configuration form when credentials are valid', () => {
+ expect(vm.find(EksClusterConfigurationForm).exists()).toBe(true);
+ });
+
+ describe('passes to the cluster configuration form', () => {
+ it('help url for kubernetes integration documentation', () => {
+ expect(vm.find(EksClusterConfigurationForm).props('gitlabManagedClusterHelpPath')).toBe(
+ gitlabManagedClusterHelpPath,
+ );
+ });
+
+ it('help url for gitlab managed cluster documentation', () => {
+ expect(vm.find(EksClusterConfigurationForm).props('kubernetesIntegrationHelpPath')).toBe(
+ kubernetesIntegrationHelpPath,
+ );
+ });
+ });
+ });
+
+ describe('when credentials are invalid', () => {
+ beforeEach(() => {
+ state.hasCredentials = false;
+ });
+
+ it('displays service credentials form', () => {
+ expect(vm.find(ServiceCredentialsForm).exists()).toBe(true);
+ });
+
+ describe('passes to the service credentials form', () => {
+ it('help url for account and external ids', () => {
+ expect(vm.find(ServiceCredentialsForm).props('accountAndExternalIdsHelpPath')).toBe(
+ accountAndExternalIdsHelpPath,
+ );
+ });
+
+ it('external link icon', () => {
+ expect(vm.find(ServiceCredentialsForm).props('externalLinkIcon')).toBe(externalLinkIcon);
+ });
+
+ it('help url to create a role ARN', () => {
+ expect(vm.find(ServiceCredentialsForm).props('createRoleArnHelpPath')).toBe(
+ createRoleArnHelpPath,
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
index 69290f6dfa9..25d613d64ed 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
@@ -4,7 +4,6 @@ import Vue from 'vue';
import { GlFormCheckbox } from '@gitlab/ui';
import EksClusterConfigurationForm from '~/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue';
-import RegionDropdown from '~/create_cluster/eks_cluster/components/region_dropdown.vue';
import eksClusterFormState from '~/create_cluster/eks_cluster/store/state';
import clusterDropdownStoreState from '~/create_cluster/eks_cluster/store/cluster_dropdown/state';
@@ -21,17 +20,21 @@ describe('EksClusterConfigurationForm', () => {
let subnetsState;
let keyPairsState;
let securityGroupsState;
+ let instanceTypesState;
let vpcsActions;
let rolesActions;
let regionsActions;
let subnetsActions;
let keyPairsActions;
let securityGroupsActions;
+ let instanceTypesActions;
let vm;
beforeEach(() => {
state = eksClusterFormState();
actions = {
+ signOut: jest.fn(),
+ createCluster: jest.fn(),
setClusterName: jest.fn(),
setEnvironmentScope: jest.fn(),
setKubernetesVersion: jest.fn(),
@@ -41,6 +44,8 @@ describe('EksClusterConfigurationForm', () => {
setRole: jest.fn(),
setKeyPair: jest.fn(),
setSecurityGroup: jest.fn(),
+ setInstanceType: jest.fn(),
+ setNodeCount: jest.fn(),
setGitlabManagedCluster: jest.fn(),
};
regionsActions = {
@@ -61,6 +66,9 @@ describe('EksClusterConfigurationForm', () => {
securityGroupsActions = {
fetchItems: jest.fn(),
};
+ instanceTypesActions = {
+ fetchItems: jest.fn(),
+ };
rolesState = {
...clusterDropdownStoreState(),
};
@@ -79,6 +87,9 @@ describe('EksClusterConfigurationForm', () => {
securityGroupsState = {
...clusterDropdownStoreState(),
};
+ instanceTypesState = {
+ ...clusterDropdownStoreState(),
+ };
store = new Vuex.Store({
state,
actions,
@@ -113,6 +124,11 @@ describe('EksClusterConfigurationForm', () => {
state: securityGroupsState,
actions: securityGroupsActions,
},
+ instanceTypes: {
+ namespaced: true,
+ state: instanceTypesState,
+ actions: instanceTypesActions,
+ },
},
});
});
@@ -124,6 +140,7 @@ describe('EksClusterConfigurationForm', () => {
propsData: {
gitlabManagedClusterHelpPath: '',
kubernetesIntegrationHelpPath: '',
+ externalLinkIcon: '',
},
});
});
@@ -132,15 +149,34 @@ describe('EksClusterConfigurationForm', () => {
vm.destroy();
});
+ const setAllConfigurationFields = () => {
+ store.replaceState({
+ ...state,
+ clusterName: 'cluster name',
+ environmentScope: '*',
+ selectedRegion: 'region',
+ selectedRole: 'role',
+ selectedKeyPair: 'key pair',
+ selectedVpc: 'vpc',
+ selectedSubnet: 'subnet',
+ selectedSecurityGroup: 'group',
+ selectedInstanceType: 'small-1',
+ });
+ };
+
+ const findSignOutButton = () => vm.find('.js-sign-out');
+ const findCreateClusterButton = () => vm.find('.js-create-cluster');
const findClusterNameInput = () => vm.find('[id=eks-cluster-name]');
const findEnvironmentScopeInput = () => vm.find('[id=eks-environment-scope]');
const findKubernetesVersionDropdown = () => vm.find('[field-id="eks-kubernetes-version"]');
- const findRegionDropdown = () => vm.find(RegionDropdown);
+ const findRegionDropdown = () => vm.find('[field-id="eks-region"]');
const findKeyPairDropdown = () => vm.find('[field-id="eks-key-pair"]');
const findVpcDropdown = () => vm.find('[field-id="eks-vpc"]');
const findSubnetDropdown = () => vm.find('[field-id="eks-subnet"]');
const findRoleDropdown = () => vm.find('[field-id="eks-role"]');
const findSecurityGroupDropdown = () => vm.find('[field-id="eks-security-group"]');
+  const findInstanceTypeDropdown = () => vm.find('[field-id="eks-instance-type"]');
+ const findNodeCountInput = () => vm.find('[id="eks-node-count"]');
const findGitlabManagedClusterCheckbox = () => vm.find(GlFormCheckbox);
describe('when mounted', () => {
@@ -151,6 +187,15 @@ describe('EksClusterConfigurationForm', () => {
it('fetches available roles', () => {
expect(rolesActions.fetchItems).toHaveBeenCalled();
});
+
+ it('fetches available instance types', () => {
+ expect(instanceTypesActions.fetchItems).toHaveBeenCalled();
+ });
+ });
+
+ it('dispatches signOut action when sign out button is clicked', () => {
+ findSignOutButton().trigger('click');
+ expect(actions.signOut).toHaveBeenCalled();
});
it('sets isLoadingRoles to RoleDropdown loading property', () => {
@@ -180,11 +225,13 @@ describe('EksClusterConfigurationForm', () => {
});
it('sets regions to RegionDropdown regions property', () => {
- expect(findRegionDropdown().props('regions')).toBe(regionsState.items);
+ expect(findRegionDropdown().props('items')).toBe(regionsState.items);
});
it('sets loadingRegionsError to RegionDropdown error property', () => {
- expect(findRegionDropdown().props('error')).toBe(regionsState.loadingItemsError);
+ regionsState.loadingItemsError = new Error();
+
+ expect(findRegionDropdown().props('hasErrors')).toEqual(true);
});
it('disables KeyPairDropdown when no region is selected', () => {
@@ -329,6 +376,34 @@ describe('EksClusterConfigurationForm', () => {
undefined,
);
});
+
+ it('cleans selected vpc', () => {
+ expect(actions.setVpc).toHaveBeenCalledWith(expect.anything(), { vpc: null }, undefined);
+ });
+
+ it('cleans selected key pair', () => {
+ expect(actions.setKeyPair).toHaveBeenCalledWith(
+ expect.anything(),
+ { keyPair: null },
+ undefined,
+ );
+ });
+
+ it('cleans selected subnet', () => {
+ expect(actions.setSubnet).toHaveBeenCalledWith(
+ expect.anything(),
+ { subnet: null },
+ undefined,
+ );
+ });
+
+ it('cleans selected security group', () => {
+ expect(actions.setSecurityGroup).toHaveBeenCalledWith(
+ expect.anything(),
+ { securityGroup: null },
+ undefined,
+ );
+ });
});
it('dispatches setClusterName when cluster name input changes', () => {
@@ -381,8 +456,10 @@ describe('EksClusterConfigurationForm', () => {
describe('when vpc is selected', () => {
const vpc = { name: 'vpc-1' };
+ const region = 'east-1';
beforeEach(() => {
+ state.selectedRegion = region;
findVpcDropdown().vm.$emit('input', vpc);
});
@@ -390,14 +467,34 @@ describe('EksClusterConfigurationForm', () => {
expect(actions.setVpc).toHaveBeenCalledWith(expect.anything(), { vpc }, undefined);
});
+ it('cleans selected subnet', () => {
+ expect(actions.setSubnet).toHaveBeenCalledWith(
+ expect.anything(),
+ { subnet: null },
+ undefined,
+ );
+ });
+
+ it('cleans selected security group', () => {
+ expect(actions.setSecurityGroup).toHaveBeenCalledWith(
+ expect.anything(),
+ { securityGroup: null },
+ undefined,
+ );
+ });
+
it('dispatches fetchSubnets action', () => {
- expect(subnetsActions.fetchItems).toHaveBeenCalledWith(expect.anything(), { vpc }, undefined);
+ expect(subnetsActions.fetchItems).toHaveBeenCalledWith(
+ expect.anything(),
+ { vpc, region },
+ undefined,
+ );
});
it('dispatches fetchSecurityGroups action', () => {
expect(securityGroupsActions.fetchItems).toHaveBeenCalledWith(
expect.anything(),
- { vpc },
+ { vpc, region },
undefined,
);
});
@@ -454,4 +551,76 @@ describe('EksClusterConfigurationForm', () => {
);
});
});
+
+ describe('when instance type is selected', () => {
+ const instanceType = 'small-1';
+
+ beforeEach(() => {
+ findInstanceTypeDropdown().vm.$emit('input', instanceType);
+ });
+
+ it('dispatches setInstanceType action', () => {
+ expect(actions.setInstanceType).toHaveBeenCalledWith(
+ expect.anything(),
+ { instanceType },
+ undefined,
+ );
+ });
+ });
+
+ it('dispatches setNodeCount when node count input changes', () => {
+ const nodeCount = 5;
+
+ findNodeCountInput().vm.$emit('input', nodeCount);
+
+ expect(actions.setNodeCount).toHaveBeenCalledWith(expect.anything(), { nodeCount }, undefined);
+ });
+
+ describe('when all cluster configuration fields are set', () => {
+ beforeEach(() => {
+ setAllConfigurationFields();
+ });
+
+ it('enables create cluster button', () => {
+ expect(findCreateClusterButton().props('disabled')).toBe(false);
+ });
+ });
+
+ describe('when at least one cluster configuration field is not set', () => {
+ beforeEach(() => {
+ setAllConfigurationFields();
+ store.replaceState({
+ ...state,
+ clusterName: '',
+ });
+ });
+
+ it('disables create cluster button', () => {
+ expect(findCreateClusterButton().props('disabled')).toBe(true);
+ });
+ });
+
+ describe('when isCreatingCluster', () => {
+ beforeEach(() => {
+ setAllConfigurationFields();
+ store.replaceState({
+ ...state,
+ isCreatingCluster: true,
+ });
+ });
+
+ it('sets create cluster button as loading', () => {
+ expect(findCreateClusterButton().props('loading')).toBe(true);
+ });
+ });
+
+ describe('clicking create cluster button', () => {
+ beforeEach(() => {
+ findCreateClusterButton().vm.$emit('click');
+ });
+
+ it('dispatches createCluster action', () => {
+ expect(actions.createCluster).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/create_cluster/eks_cluster/components/region_dropdown_spec.js b/spec/frontend/create_cluster/eks_cluster/components/region_dropdown_spec.js
deleted file mode 100644
index 0ebb5026a4b..00000000000
--- a/spec/frontend/create_cluster/eks_cluster/components/region_dropdown_spec.js
+++ /dev/null
@@ -1,55 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-
-import ClusterFormDropdown from '~/create_cluster/eks_cluster/components/cluster_form_dropdown.vue';
-import RegionDropdown from '~/create_cluster/eks_cluster/components/region_dropdown.vue';
-
-describe('RegionDropdown', () => {
- let vm;
-
- const getClusterFormDropdown = () => vm.find(ClusterFormDropdown);
-
- beforeEach(() => {
- vm = shallowMount(RegionDropdown);
- });
- afterEach(() => vm.destroy());
-
- it('renders a cluster-form-dropdown', () => {
- expect(getClusterFormDropdown().exists()).toBe(true);
- });
-
- it('sets regions to cluster-form-dropdown items property', () => {
- const regions = [{ name: 'basic' }];
-
- vm.setProps({ regions });
-
- expect(getClusterFormDropdown().props('items')).toEqual(regions);
- });
-
- it('sets a loading text', () => {
- expect(getClusterFormDropdown().props('loadingText')).toEqual('Loading Regions');
- });
-
- it('sets a placeholder', () => {
- expect(getClusterFormDropdown().props('placeholder')).toEqual('Select a region');
- });
-
- it('sets an empty results text', () => {
- expect(getClusterFormDropdown().props('emptyText')).toEqual('No region found');
- });
-
- it('sets a search field placeholder', () => {
- expect(getClusterFormDropdown().props('searchFieldPlaceholder')).toEqual('Search regions');
- });
-
- it('sets hasErrors property', () => {
- vm.setProps({ error: {} });
-
- expect(getClusterFormDropdown().props('hasErrors')).toEqual(true);
- });
-
- it('sets an error message', () => {
- expect(getClusterFormDropdown().props('errorMessage')).toEqual(
- 'Could not load regions from your AWS account',
- );
- });
-});
diff --git a/spec/frontend/create_cluster/eks_cluster/components/service_credentials_form_spec.js b/spec/frontend/create_cluster/eks_cluster/components/service_credentials_form_spec.js
new file mode 100644
index 00000000000..0be723b48f0
--- /dev/null
+++ b/spec/frontend/create_cluster/eks_cluster/components/service_credentials_form_spec.js
@@ -0,0 +1,117 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+
+import ServiceCredentialsForm from '~/create_cluster/eks_cluster/components/service_credentials_form.vue';
+import LoadingButton from '~/vue_shared/components/loading_button.vue';
+
+import eksClusterState from '~/create_cluster/eks_cluster/store/state';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('ServiceCredentialsForm', () => {
+ let vm;
+ let state;
+ let createRoleAction;
+ const accountId = 'accountId';
+ const externalId = 'externalId';
+
+ beforeEach(() => {
+ state = Object.assign(eksClusterState(), {
+ accountId,
+ externalId,
+ });
+ createRoleAction = jest.fn();
+
+ const store = new Vuex.Store({
+ state,
+ actions: {
+ createRole: createRoleAction,
+ },
+ });
+ vm = shallowMount(ServiceCredentialsForm, {
+ propsData: {
+ accountAndExternalIdsHelpPath: '',
+ createRoleArnHelpPath: '',
+ externalLinkIcon: '',
+ },
+ localVue,
+ store,
+ });
+ });
+ afterEach(() => vm.destroy());
+
+ const findAccountIdInput = () => vm.find('#gitlab-account-id');
+ const findCopyAccountIdButton = () => vm.find('.js-copy-account-id-button');
+ const findExternalIdInput = () => vm.find('#eks-external-id');
+ const findCopyExternalIdButton = () => vm.find('.js-copy-external-id-button');
+ const findInvalidCredentials = () => vm.find('.js-invalid-credentials');
+ const findSubmitButton = () => vm.find(LoadingButton);
+ const findForm = () => vm.find('form[name="service-credentials-form"]');
+
+ it('displays provided account id', () => {
+ expect(findAccountIdInput().attributes('value')).toBe(accountId);
+ });
+
+ it('allows to copy account id', () => {
+ expect(findCopyAccountIdButton().props('text')).toBe(accountId);
+ });
+
+ it('displays provided external id', () => {
+ expect(findExternalIdInput().attributes('value')).toBe(externalId);
+ });
+
+ it('allows to copy external id', () => {
+ expect(findCopyExternalIdButton().props('text')).toBe(externalId);
+ });
+
+ it('disables submit button when role ARN is not provided', () => {
+ expect(findSubmitButton().attributes('disabled')).toBeTruthy();
+ });
+
+ it('enables submit button when role ARN is provided', () => {
+ vm.setData({ roleArn: '123' });
+
+ expect(findSubmitButton().attributes('disabled')).toBeFalsy();
+ });
+
+ it('dispatches createRole action when form is submitted', () => {
+ findForm().trigger('submit');
+
+ expect(createRoleAction).toHaveBeenCalled();
+ });
+
+ describe('when is creating role', () => {
+ beforeEach(() => {
+ vm.setData({ roleArn: '123' }); // set role ARN to enable button
+
+ state.isCreatingRole = true;
+ });
+
+ it('disables submit button', () => {
+ expect(findSubmitButton().props('disabled')).toBe(true);
+ });
+
+ it('sets submit button as loading', () => {
+ expect(findSubmitButton().props('loading')).toBe(true);
+ });
+
+ it('displays Authenticating label on submit button', () => {
+ expect(findSubmitButton().props('label')).toBe('Authenticating');
+ });
+ });
+
+ describe('when role can’t be created', () => {
+ beforeEach(() => {
+ state.createRoleError = 'Invalid credentials';
+ });
+
+ it('displays invalid role warning banner', () => {
+ expect(findInvalidCredentials().exists()).toBe(true);
+ });
+
+ it('displays invalid role error message', () => {
+ expect(findInvalidCredentials().text()).toContain(state.createRoleError);
+ });
+ });
+});
diff --git a/spec/frontend/create_cluster/eks_cluster/services/aws_services_facade_spec.js b/spec/frontend/create_cluster/eks_cluster/services/aws_services_facade_spec.js
new file mode 100644
index 00000000000..25be858dcb3
--- /dev/null
+++ b/spec/frontend/create_cluster/eks_cluster/services/aws_services_facade_spec.js
@@ -0,0 +1,152 @@
+import awsServicesFacadeFactory from '~/create_cluster/eks_cluster/services/aws_services_facade';
+import axios from '~/lib/utils/axios_utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+
+describe('awsServicesFacade', () => {
+ let apiPaths;
+ let axiosMock;
+ let awsServices;
+ let region;
+ let vpc;
+
+ beforeEach(() => {
+ apiPaths = {
+ getKeyPairsPath: '/clusters/aws/api/key_pairs',
+ getRegionsPath: '/clusters/aws/api/regions',
+ getRolesPath: '/clusters/aws/api/roles',
+ getSecurityGroupsPath: '/clusters/aws/api/security_groups',
+ getSubnetsPath: '/clusters/aws/api/subnets',
+ getVpcsPath: '/clusters/aws/api/vpcs',
+ getInstanceTypesPath: '/clusters/aws/api/instance_types',
+ };
+ region = 'west-1';
+ vpc = 'vpc-2';
+ awsServices = awsServicesFacadeFactory(apiPaths);
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ describe('when fetchRegions succeeds', () => {
+ let regions;
+ let regionsOutput;
+
+ beforeEach(() => {
+ regions = [{ region_name: 'east-1' }, { region_name: 'west-2' }];
+ regionsOutput = regions.map(({ region_name: name }) => ({ name, value: name }));
+ axiosMock.onGet(apiPaths.getRegionsPath).reply(200, { regions });
+ });
+
+ it('returns list of regions where each item has a name and value', () => {
+ expect(awsServices.fetchRegions()).resolves.toEqual(regionsOutput);
+ });
+ });
+
+ describe('when fetchRoles succeeds', () => {
+ let roles;
+ let rolesOutput;
+
+ beforeEach(() => {
+ roles = [
+ { role_name: 'admin', arn: 'aws::admin' },
+ { role_name: 'read-only', arn: 'aws::read-only' },
+ ];
+ rolesOutput = roles.map(({ role_name: name, arn: value }) => ({ name, value }));
+ axiosMock.onGet(apiPaths.getRolesPath).reply(200, { roles });
+ });
+
+ it('returns list of roles where each item has a name and value', () => {
+ expect(awsServices.fetchRoles()).resolves.toEqual(rolesOutput);
+ });
+ });
+
+ describe('when fetchKeyPairs succeeds', () => {
+ let keyPairs;
+ let keyPairsOutput;
+
+ beforeEach(() => {
+ keyPairs = [{ key_name: 'key-pair' }, { key_name: 'key-pair-2' }];
+ keyPairsOutput = keyPairs.map(({ key_name: name }) => ({ name, value: name }));
+ axiosMock
+ .onGet(apiPaths.getKeyPairsPath, { params: { region } })
+ .reply(200, { key_pairs: keyPairs });
+ });
+
+ it('return list of key pairs where each item has a name and value', () => {
+ expect(awsServices.fetchKeyPairs({ region })).resolves.toEqual(keyPairsOutput);
+ });
+ });
+
+ describe('when fetchVpcs succeeds', () => {
+ let vpcs;
+ let vpcsOutput;
+
+ beforeEach(() => {
+ vpcs = [{ vpc_id: 'vpc-1' }, { vpc_id: 'vpc-2' }];
+ vpcsOutput = vpcs.map(({ vpc_id: name }) => ({ name, value: name }));
+ axiosMock.onGet(apiPaths.getVpcsPath, { params: { region } }).reply(200, { vpcs });
+ });
+
+ it('return list of vpcs where each item has a name and value', () => {
+ expect(awsServices.fetchVpcs({ region })).resolves.toEqual(vpcsOutput);
+ });
+ });
+
+ describe('when fetchSubnets succeeds', () => {
+ let subnets;
+ let subnetsOutput;
+
+ beforeEach(() => {
+ subnets = [{ subnet_id: 'vpc-1' }, { subnet_id: 'vpc-2' }];
+ subnetsOutput = subnets.map(({ subnet_id }) => ({ name: subnet_id, value: subnet_id }));
+ axiosMock
+ .onGet(apiPaths.getSubnetsPath, { params: { region, vpc_id: vpc } })
+ .reply(200, { subnets });
+ });
+
+ it('return list of subnets where each item has a name and value', () => {
+ expect(awsServices.fetchSubnets({ region, vpc })).resolves.toEqual(subnetsOutput);
+ });
+ });
+
+ describe('when fetchSecurityGroups succeeds', () => {
+ let securityGroups;
+ let securityGroupsOutput;
+
+ beforeEach(() => {
+ securityGroups = [
+ { group_name: 'admin group', group_id: 'group-1' },
+ { group_name: 'basic group', group_id: 'group-2' },
+ ];
+ securityGroupsOutput = securityGroups.map(({ group_id: value, group_name: name }) => ({
+ name,
+ value,
+ }));
+ axiosMock
+ .onGet(apiPaths.getSecurityGroupsPath, { params: { region, vpc_id: vpc } })
+ .reply(200, { security_groups: securityGroups });
+ });
+
+ it('return list of security groups where each item has a name and value', () => {
+ expect(awsServices.fetchSecurityGroups({ region, vpc })).resolves.toEqual(
+ securityGroupsOutput,
+ );
+ });
+ });
+
+ describe('when fetchInstanceTypes succeeds', () => {
+ let instanceTypes;
+ let instanceTypesOutput;
+
+ beforeEach(() => {
+ instanceTypes = [{ instance_type_name: 't2.small' }, { instance_type_name: 't2.medium' }];
+ instanceTypesOutput = instanceTypes.map(({ instance_type_name }) => ({
+ name: instance_type_name,
+ value: instance_type_name,
+ }));
+ axiosMock.onGet(apiPaths.getInstanceTypesPath).reply(200, { instance_types: instanceTypes });
+ });
+
+ it('return list of instance types where each item has a name and value', () => {
+ expect(awsServices.fetchInstanceTypes()).resolves.toEqual(instanceTypesOutput);
+ });
+ });
+});
diff --git a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
index 1ed7f806804..cf6c317a2df 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
@@ -13,7 +13,20 @@ import {
SET_ROLE,
SET_SECURITY_GROUP,
SET_GITLAB_MANAGED_CLUSTER,
+ SET_INSTANCE_TYPE,
+ SET_NODE_COUNT,
+ REQUEST_CREATE_ROLE,
+ CREATE_ROLE_SUCCESS,
+ CREATE_ROLE_ERROR,
+ REQUEST_CREATE_CLUSTER,
+ CREATE_CLUSTER_ERROR,
+ SIGN_OUT,
} from '~/create_cluster/eks_cluster/store/mutation_types';
+import axios from '~/lib/utils/axios_utils';
+import MockAdapter from 'axios-mock-adapter';
+import createFlash from '~/flash';
+
+jest.mock('~/flash');
describe('EKS Cluster Store Actions', () => {
let clusterName;
@@ -25,19 +38,43 @@ describe('EKS Cluster Store Actions', () => {
let role;
let keyPair;
let securityGroup;
+ let instanceType;
+ let nodeCount;
let gitlabManagedCluster;
+ let mock;
+ let state;
+ let newClusterUrl;
beforeEach(() => {
clusterName = 'my cluster';
environmentScope = 'production';
kubernetesVersion = '11.1';
- region = { name: 'regions-1' };
- vpc = { name: 'vpc-1' };
- subnet = { name: 'subnet-1' };
- role = { name: 'role-1' };
- keyPair = { name: 'key-pair-1' };
- securityGroup = { name: 'default group' };
+ region = 'regions-1';
+ vpc = 'vpc-1';
+ subnet = 'subnet-1';
+ role = 'role-1';
+ keyPair = 'key-pair-1';
+ securityGroup = 'default group';
+ instanceType = 'small-1';
+ nodeCount = '5';
gitlabManagedCluster = true;
+
+ newClusterUrl = '/clusters/1';
+
+ state = {
+ ...createState(),
+ createRolePath: '/clusters/roles/',
+ signOutPath: '/aws/signout',
+ createClusterPath: '/clusters/',
+ };
+ });
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
});
it.each`
@@ -51,10 +88,207 @@ describe('EKS Cluster Store Actions', () => {
${'setVpc'} | ${SET_VPC} | ${{ vpc }} | ${'vpc'}
${'setSubnet'} | ${SET_SUBNET} | ${{ subnet }} | ${'subnet'}
${'setSecurityGroup'} | ${SET_SECURITY_GROUP} | ${{ securityGroup }} | ${'securityGroup'}
+ ${'setInstanceType'} | ${SET_INSTANCE_TYPE} | ${{ instanceType }} | ${'instance type'}
+ ${'setNodeCount'} | ${SET_NODE_COUNT} | ${{ nodeCount }} | ${'node count'}
${'setGitlabManagedCluster'} | ${SET_GITLAB_MANAGED_CLUSTER} | ${gitlabManagedCluster} | ${'gitlab managed cluster'}
`(`$action commits $mutation with $payloadDescription payload`, data => {
const { action, mutation, payload } = data;
- testAction(actions[action], payload, createState(), [{ type: mutation, payload }]);
+ testAction(actions[action], payload, state, [{ type: mutation, payload }]);
+ });
+
+ describe('createRole', () => {
+ const payload = {
+ roleArn: 'role_arn',
+ externalId: 'externalId',
+ };
+
+ describe('when request succeeds', () => {
+ beforeEach(() => {
+ mock
+ .onPost(state.createRolePath, {
+ role_arn: payload.roleArn,
+ role_external_id: payload.externalId,
+ })
+ .reply(201);
+ });
+
+ it('dispatches createRoleSuccess action', () =>
+ testAction(
+ actions.createRole,
+ payload,
+ state,
+ [],
+ [{ type: 'requestCreateRole' }, { type: 'createRoleSuccess' }],
+ ));
+ });
+
+ describe('when request fails', () => {
+ let error;
+
+ beforeEach(() => {
+ error = new Error('Request failed with status code 400');
+ mock
+ .onPost(state.createRolePath, {
+ role_arn: payload.roleArn,
+ role_external_id: payload.externalId,
+ })
+ .reply(400, error);
+ });
+
+ it('dispatches createRoleError action', () =>
+ testAction(
+ actions.createRole,
+ payload,
+ state,
+ [],
+ [{ type: 'requestCreateRole' }, { type: 'createRoleError', payload: { error } }],
+ ));
+ });
+ });
+
+ describe('requestCreateRole', () => {
+ it('commits requestCreateRole mutation', () => {
+ testAction(actions.requestCreateRole, null, state, [{ type: REQUEST_CREATE_ROLE }]);
+ });
+ });
+
+ describe('createRoleSuccess', () => {
+ it('commits createRoleSuccess mutation', () => {
+ testAction(actions.createRoleSuccess, null, state, [{ type: CREATE_ROLE_SUCCESS }]);
+ });
+ });
+
+ describe('createRoleError', () => {
+ it('commits createRoleError mutation', () => {
+ const payload = {
+ error: new Error(),
+ };
+
+ testAction(actions.createRoleError, payload, state, [{ type: CREATE_ROLE_ERROR, payload }]);
+ });
+ });
+
+ describe('createCluster', () => {
+ let requestPayload;
+
+ beforeEach(() => {
+ requestPayload = {
+ name: clusterName,
+ environment_scope: environmentScope,
+ managed: gitlabManagedCluster,
+ provider_aws_attributes: {
+ region,
+ vpc_id: vpc,
+ subnet_ids: subnet,
+ role_arn: role,
+ key_name: keyPair,
+ security_group_id: securityGroup,
+ instance_type: instanceType,
+ num_nodes: nodeCount,
+ },
+ };
+ state = Object.assign(createState(), {
+ clusterName,
+ environmentScope,
+ kubernetesVersion,
+ selectedRegion: region,
+ selectedVpc: vpc,
+ selectedSubnet: subnet,
+ selectedRole: role,
+ selectedKeyPair: keyPair,
+ selectedSecurityGroup: securityGroup,
+ selectedInstanceType: instanceType,
+ nodeCount,
+ gitlabManagedCluster,
+ });
+ });
+
+ describe('when request succeeds', () => {
+ beforeEach(() => {
+ mock.onPost(state.createClusterPath, requestPayload).reply(201, null, {
+ location: '/clusters/1',
+ });
+ });
+
+ it('dispatches createClusterSuccess action', () =>
+ testAction(
+ actions.createCluster,
+ null,
+ state,
+ [],
+ [
+ { type: 'requestCreateCluster' },
+ { type: 'createClusterSuccess', payload: newClusterUrl },
+ ],
+ ));
+ });
+
+ describe('when request fails', () => {
+ let response;
+
+ beforeEach(() => {
+ response = 'Request failed with status code 400';
+ mock.onPost(state.createClusterPath, requestPayload).reply(400, response);
+ });
+
+ it('dispatches createRoleError action', () =>
+ testAction(
+ actions.createCluster,
+ null,
+ state,
+ [],
+ [{ type: 'requestCreateCluster' }, { type: 'createClusterError', payload: response }],
+ ));
+ });
+ });
+
+ describe('requestCreateCluster', () => {
+ it('commits requestCreateCluster mutation', () => {
+ testAction(actions.requestCreateCluster, null, state, [{ type: REQUEST_CREATE_CLUSTER }]);
+ });
+ });
+
+ describe('createClusterSuccess', () => {
+ beforeEach(() => {
+ jest.spyOn(window.location, 'assign').mockImplementation(() => {});
+ });
+ afterEach(() => {
+ window.location.assign.mockRestore();
+ });
+
+ it('redirects to the new cluster URL', () => {
+ actions.createClusterSuccess(null, newClusterUrl);
+
+ expect(window.location.assign).toHaveBeenCalledWith(newClusterUrl);
+ });
+ });
+
+ describe('createClusterError', () => {
+ let payload;
+
+ beforeEach(() => {
+ payload = { name: ['Create cluster failed'] };
+ });
+
+ it('commits createClusterError mutation', () => {
+ testAction(actions.createClusterError, payload, state, [
+ { type: CREATE_CLUSTER_ERROR, payload },
+ ]);
+ });
+
+ it('creates a flash that displays the create cluster error', () => {
+ expect(createFlash).toHaveBeenCalledWith(payload.name[0]);
+ });
+ });
+
+ describe('signOut', () => {
+ beforeEach(() => {
+ mock.onDelete(state.signOutPath).reply(200, null);
+ });
+
+ it('commits signOut mutation', () => {
+ testAction(actions.signOut, null, state, [{ type: SIGN_OUT }]);
+ });
});
});
diff --git a/spec/frontend/create_cluster/eks_cluster/store/mutations_spec.js b/spec/frontend/create_cluster/eks_cluster/store/mutations_spec.js
index 81b65180fb5..0fb392f5eea 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/mutations_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/store/mutations_spec.js
@@ -8,7 +8,15 @@ import {
SET_SUBNET,
SET_ROLE,
SET_SECURITY_GROUP,
+ SET_INSTANCE_TYPE,
+ SET_NODE_COUNT,
SET_GITLAB_MANAGED_CLUSTER,
+ REQUEST_CREATE_ROLE,
+ CREATE_ROLE_SUCCESS,
+ CREATE_ROLE_ERROR,
+ REQUEST_CREATE_CLUSTER,
+ CREATE_CLUSTER_ERROR,
+ SIGN_OUT,
} from '~/create_cluster/eks_cluster/store/mutation_types';
import createState from '~/create_cluster/eks_cluster/store/state';
import mutations from '~/create_cluster/eks_cluster/store/mutations';
@@ -24,6 +32,8 @@ describe('Create EKS cluster store mutations', () => {
let role;
let keyPair;
let securityGroup;
+ let instanceType;
+ let nodeCount;
let gitlabManagedCluster;
beforeEach(() => {
@@ -36,6 +46,8 @@ describe('Create EKS cluster store mutations', () => {
role = { name: 'role-1' };
keyPair = { name: 'key pair' };
securityGroup = { name: 'default group' };
+ instanceType = 'small-1';
+ nodeCount = '5';
gitlabManagedCluster = false;
state = createState();
@@ -50,8 +62,10 @@ describe('Create EKS cluster store mutations', () => {
${SET_REGION} | ${'selectedRegion'} | ${{ region }} | ${region} | ${'selected region payload'}
${SET_KEY_PAIR} | ${'selectedKeyPair'} | ${{ keyPair }} | ${keyPair} | ${'selected key pair payload'}
${SET_VPC} | ${'selectedVpc'} | ${{ vpc }} | ${vpc} | ${'selected vpc payload'}
- ${SET_SUBNET} | ${'selectedSubnet'} | ${{ subnet }} | ${subnet} | ${'selected sybnet payload'}
+ ${SET_SUBNET} | ${'selectedSubnet'} | ${{ subnet }} | ${subnet} | ${'selected subnet payload'}
${SET_SECURITY_GROUP} | ${'selectedSecurityGroup'} | ${{ securityGroup }} | ${securityGroup} | ${'selected security group payload'}
+ ${SET_INSTANCE_TYPE} | ${'selectedInstanceType'} | ${{ instanceType }} | ${instanceType} | ${'selected instance type payload'}
+ ${SET_NODE_COUNT} | ${'nodeCount'} | ${{ nodeCount }} | ${nodeCount} | ${'node count payload'}
${SET_GITLAB_MANAGED_CLUSTER} | ${'gitlabManagedCluster'} | ${{ gitlabManagedCluster }} | ${gitlabManagedCluster} | ${'gitlab managed cluster'}
`(`$mutation sets $mutatedProperty to $expectedValueDescription`, data => {
const { mutation, mutatedProperty, payload, expectedValue } = data;
@@ -59,4 +73,101 @@ describe('Create EKS cluster store mutations', () => {
mutations[mutation](state, payload);
expect(state[mutatedProperty]).toBe(expectedValue);
});
+
+ describe(`mutation ${REQUEST_CREATE_ROLE}`, () => {
+ beforeEach(() => {
+ mutations[REQUEST_CREATE_ROLE](state);
+ });
+
+ it('sets isCreatingRole to true', () => {
+ expect(state.isCreatingRole).toBe(true);
+ });
+
+ it('sets createRoleError to null', () => {
+ expect(state.createRoleError).toBe(null);
+ });
+
+ it('sets hasCredentials to false', () => {
+ expect(state.hasCredentials).toBe(false);
+ });
+ });
+
+ describe(`mutation ${CREATE_ROLE_SUCCESS}`, () => {
+ beforeEach(() => {
+ mutations[CREATE_ROLE_SUCCESS](state);
+ });
+
+ it('sets isCreatingRole to false', () => {
+ expect(state.isCreatingRole).toBe(false);
+ });
+
+ it('sets createRoleError to null', () => {
+ expect(state.createRoleError).toBe(null);
+ });
+
+ it('sets hasCredentials to true', () => {
+ expect(state.hasCredentials).toBe(true);
+ });
+ });
+
+ describe(`mutation ${CREATE_ROLE_ERROR}`, () => {
+ const error = new Error();
+
+ beforeEach(() => {
+ mutations[CREATE_ROLE_ERROR](state, { error });
+ });
+
+ it('sets isCreatingRole to false', () => {
+ expect(state.isCreatingRole).toBe(false);
+ });
+
+ it('sets createRoleError to the error object', () => {
+ expect(state.createRoleError).toBe(error);
+ });
+
+ it('sets hasCredentials to false', () => {
+ expect(state.hasCredentials).toBe(false);
+ });
+ });
+
+ describe(`mutation ${REQUEST_CREATE_CLUSTER}`, () => {
+ beforeEach(() => {
+ mutations[REQUEST_CREATE_CLUSTER](state);
+ });
+
+ it('sets isCreatingCluster to true', () => {
+ expect(state.isCreatingCluster).toBe(true);
+ });
+
+ it('sets createClusterError to null', () => {
+ expect(state.createClusterError).toBe(null);
+ });
+ });
+
+ describe(`mutation ${CREATE_CLUSTER_ERROR}`, () => {
+ const error = new Error();
+
+ beforeEach(() => {
+ mutations[CREATE_CLUSTER_ERROR](state, { error });
+ });
+
+ it('sets isCreatingCluster to false', () => {
+ expect(state.isCreatingCluster).toBe(false);
+ });
+
+ it('sets createClusterError to the error object', () => {
+ expect(state.createClusterError).toBe(error);
+ });
+ });
+
+ describe(`mutation ${SIGN_OUT}`, () => {
+ beforeEach(() => {
+ state.hasCredentials = true;
+ mutations[SIGN_OUT](state);
+ });
+
+ it('sets hasCredentials to false', () => {
+ expect(state.hasCredentials).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/projects/gke_cluster_namespace/gke_cluster_namespace_spec.js b/spec/frontend/create_cluster/gke_cluster_namespace/gke_cluster_namespace_spec.js
index 7b8df03d3c3..b1c25d8fff7 100644
--- a/spec/frontend/projects/gke_cluster_namespace/gke_cluster_namespace_spec.js
+++ b/spec/frontend/create_cluster/gke_cluster_namespace/gke_cluster_namespace_spec.js
@@ -1,4 +1,4 @@
-import initGkeNamespace from '~/projects/gke_cluster_namespace';
+import initGkeNamespace from '~/create_cluster/gke_cluster_namespace';
describe('GKE cluster namespace', () => {
const changeEvent = new Event('change');
@@ -14,7 +14,7 @@ describe('GKE cluster namespace', () => {
<input class="js-gl-managed" type="checkbox" value="1" checked />
<div class="js-namespace">
<input type="text" />
- </div>
+ </div>
<div class="js-namespace-prefixed">
<input type="text" />
</div>
diff --git a/spec/frontend/create_cluster/init_create_cluster_spec.js b/spec/frontend/create_cluster/init_create_cluster_spec.js
new file mode 100644
index 00000000000..e7b9a7adde4
--- /dev/null
+++ b/spec/frontend/create_cluster/init_create_cluster_spec.js
@@ -0,0 +1,73 @@
+import initCreateCluster from '~/create_cluster/init_create_cluster';
+import initGkeDropdowns from '~/create_cluster/gke_cluster';
+import initGkeNamespace from '~/create_cluster/gke_cluster_namespace';
+import PersistentUserCallout from '~/persistent_user_callout';
+
+jest.mock('~/create_cluster/gke_cluster', () => jest.fn());
+jest.mock('~/create_cluster/gke_cluster_namespace', () => jest.fn());
+jest.mock('~/persistent_user_callout', () => ({
+ factory: jest.fn(),
+}));
+
+describe('initCreateCluster', () => {
+ let document;
+ let gon;
+
+ beforeEach(() => {
+ document = {
+ body: { dataset: {} },
+ querySelector: jest.fn(),
+ };
+ gon = { features: {} };
+ });
+ afterEach(() => {
+ initGkeDropdowns.mockReset();
+ initGkeNamespace.mockReset();
+ PersistentUserCallout.factory.mockReset();
+ });
+
+ describe.each`
+ pageSuffix | page
+ ${':clusters:new'} | ${'project:clusters:new'}
+ ${':clusters:create_gcp'} | ${'groups:clusters:create_gcp'}
+ ${':clusters:create_user'} | ${'admin:clusters:create_user'}
+ `('when cluster page ends in $pageSuffix', ({ page }) => {
+ beforeEach(() => {
+ document.body.dataset = { page };
+
+ initCreateCluster(document, gon);
+ });
+
+ it('initializes create GKE cluster app', () => {
+ expect(initGkeDropdowns).toHaveBeenCalled();
+ });
+
+ it('initializes gcp signup offer banner', () => {
+ expect(PersistentUserCallout.factory).toHaveBeenCalled();
+ });
+ });
+
+ describe('when creating a project level cluster', () => {
+ it('initializes gke namespace app', () => {
+ document.body.dataset.page = 'project:clusters:new';
+
+ initCreateCluster(document, gon);
+
+ expect(initGkeNamespace).toHaveBeenCalled();
+ });
+ });
+
+ describe.each`
+ clusterLevel | page
+ ${'group level'} | ${'groups:clusters:new'}
+ ${'instance level'} | ${'admin:clusters:create_gcp'}
+ `('when creating a $clusterLevel cluster', ({ page }) => {
+ it('does not initialize gke namespace app', () => {
+ document.body.dataset = { page };
+
+ initCreateCluster(document, gon);
+
+ expect(initGkeNamespace).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/cycle_analytics/stage_nav_item_spec.js b/spec/frontend/cycle_analytics/stage_nav_item_spec.js
index ff079082ca7..a7a1d563e1e 100644
--- a/spec/frontend/cycle_analytics/stage_nav_item_spec.js
+++ b/spec/frontend/cycle_analytics/stage_nav_item_spec.js
@@ -133,45 +133,19 @@ describe('StageNavItem', () => {
hasStageName();
});
- it('renders options menu', () => {
- expect(wrapper.find('.more-actions-toggle').exists()).toBe(true);
+ it('does not render options menu', () => {
+ expect(wrapper.find('.more-actions-toggle').exists()).toBe(false);
});
- describe('Default stages', () => {
- beforeEach(() => {
- wrapper = createComponent(
- { canEdit: true, isUserAllowed: true, isDefaultStage: true },
- false,
- );
- });
- it('can hide the stage', () => {
- expect(wrapper.text()).toContain('Hide stage');
- });
- it('can not edit the stage', () => {
- expect(wrapper.text()).not.toContain('Edit stage');
- });
- it('can not remove the stage', () => {
- expect(wrapper.text()).not.toContain('Remove stage');
- });
+ it('can not edit the stage', () => {
+ expect(wrapper.text()).not.toContain('Edit stage');
+ });
+ it('can not remove the stage', () => {
+ expect(wrapper.text()).not.toContain('Remove stage');
});
- describe('Custom stages', () => {
- beforeEach(() => {
- wrapper = createComponent(
- { canEdit: true, isUserAllowed: true, isDefaultStage: false },
- false,
- );
- });
- it('can edit the stage', () => {
- expect(wrapper.text()).toContain('Edit stage');
- });
- it('can remove the stage', () => {
- expect(wrapper.text()).toContain('Remove stage');
- });
-
- it('can not hide the stage', () => {
- expect(wrapper.text()).not.toContain('Hide stage');
- });
+ it('can not hide the stage', () => {
+ expect(wrapper.text()).not.toContain('Hide stage');
});
});
});
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index 290c0e797cb..3c6553f3547 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -41,6 +41,12 @@ class CustomEnvironment extends JSDOMEnvironment {
this.global.fixturesBasePath = `${ROOT_PATH}/tmp/tests/frontend/fixtures${IS_EE ? '-ee' : ''}`;
this.global.staticFixturesBasePath = `${ROOT_PATH}/spec/frontend/fixtures`;
+ /**
+ * window.fetch() is required by the apollo-upload-client library otherwise
+ * a ReferenceError is generated: https://github.com/jaydenseric/apollo-upload-client/issues/100
+ */
+ this.global.fetch = () => {};
+
// Not yet supported by JSDOM: https://github.com/jsdom/jsdom/issues/317
this.global.document.createRange = () => ({
setStart: () => {},
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
new file mode 100644
index 00000000000..54e8b0848a2
--- /dev/null
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -0,0 +1,105 @@
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { GlLoadingIcon, GlLink } from '@gitlab/ui';
+import Stacktrace from '~/error_tracking/components/stacktrace.vue';
+import ErrorDetails from '~/error_tracking/components/error_details.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('ErrorDetails', () => {
+ let store;
+ let wrapper;
+ let actions;
+ let getters;
+
+ function mountComponent() {
+ wrapper = shallowMount(ErrorDetails, {
+ localVue,
+ store,
+ propsData: {
+ issueDetailsPath: '/123/details',
+ issueStackTracePath: '/stacktrace',
+ },
+ });
+ }
+
+ beforeEach(() => {
+ actions = {
+ startPollingDetails: () => {},
+ startPollingStacktrace: () => {},
+ };
+
+ getters = {
+ sentryUrl: () => 'sentry.io',
+ stacktrace: () => [{ context: [1, 2], lineNo: 53, filename: 'index.js' }],
+ };
+
+ const state = {
+ error: {},
+ loading: true,
+ stacktraceData: {},
+ loadingStacktrace: true,
+ };
+
+ store = new Vuex.Store({
+ modules: {
+ details: {
+ namespaced: true,
+ actions,
+ state,
+ getters,
+ },
+ },
+ });
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ describe('loading', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('should show spinner while loading', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.find(GlLink).exists()).toBe(false);
+ expect(wrapper.find(Stacktrace).exists()).toBe(false);
+ });
+ });
+
+ describe('Error details', () => {
+ it('should show Sentry error details without stacktrace', () => {
+ store.state.details.loading = false;
+ store.state.details.error.id = 1;
+ mountComponent();
+ expect(wrapper.find(GlLink).exists()).toBe(true);
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.find(Stacktrace).exists()).toBe(false);
+ });
+
+ describe('Stacktrace', () => {
+ it('should show stacktrace', () => {
+ store.state.details.loading = false;
+ store.state.details.error.id = 1;
+ store.state.details.loadingStacktrace = false;
+ mountComponent();
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(Stacktrace).exists()).toBe(true);
+ });
+
+ it('should NOT show stacktrace if no entries', () => {
+ store.state.details.loading = false;
+ store.state.details.loadingStacktrace = false;
+ store.getters = { 'details/sentryUrl': () => 'sentry.io', 'details/stacktrace': () => [] };
+ mountComponent();
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(Stacktrace).exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index ce8b8908026..1bbf23cc602 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -34,7 +34,7 @@ describe('ErrorTrackingList', () => {
beforeEach(() => {
actions = {
- getErrorList: () => {},
+ getSentryData: () => {},
startPolling: () => {},
restartPolling: jest.fn().mockName('restartPolling'),
};
@@ -45,8 +45,13 @@ describe('ErrorTrackingList', () => {
};
store = new Vuex.Store({
- actions,
- state,
+ modules: {
+ list: {
+ namespaced: true,
+ actions,
+ state,
+ },
+ },
});
});
@@ -70,7 +75,7 @@ describe('ErrorTrackingList', () => {
describe('results', () => {
beforeEach(() => {
- store.state.loading = false;
+ store.state.list.loading = false;
mountComponent();
});
@@ -84,7 +89,7 @@ describe('ErrorTrackingList', () => {
describe('no results', () => {
beforeEach(() => {
- store.state.loading = false;
+ store.state.list.loading = false;
mountComponent();
});
diff --git a/spec/frontend/error_tracking/components/stacktrace_entry_spec.js b/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
new file mode 100644
index 00000000000..95958408770
--- /dev/null
+++ b/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
@@ -0,0 +1,49 @@
+import { shallowMount } from '@vue/test-utils';
+import StackTraceEntry from '~/error_tracking/components/stacktrace_entry.vue';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import FileIcon from '~/vue_shared/components/file_icon.vue';
+import Icon from '~/vue_shared/components/icon.vue';
+
+describe('Stacktrace Entry', () => {
+ let wrapper;
+
+ function mountComponent(props) {
+ wrapper = shallowMount(StackTraceEntry, {
+ propsData: {
+ filePath: 'sidekiq/util.rb',
+ lines: [
+ [22, ' def safe_thread(name, \u0026block)\n'],
+ [23, ' Thread.new do\n'],
+ [24, " Thread.current['sidekiq_label'] = name\n"],
+ [25, ' watchdog(name, \u0026block)\n'],
+ ],
+ errorLine: 24,
+ ...props,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ it('should render stacktrace entry collapsed', () => {
+ expect(wrapper.find(StackTraceEntry).exists()).toBe(true);
+ expect(wrapper.find(ClipboardButton).exists()).toBe(true);
+ expect(wrapper.find(Icon).exists()).toBe(true);
+ expect(wrapper.find(FileIcon).exists()).toBe(true);
+ expect(wrapper.element.querySelectorAll('table').length).toBe(0);
+ });
+
+ it('should render stacktrace entry table expanded', () => {
+ mountComponent({ expanded: true });
+ expect(wrapper.element.querySelectorAll('tr.line_holder').length).toBe(4);
+ expect(wrapper.element.querySelectorAll('.line_content.old').length).toBe(1);
+ });
+});
diff --git a/spec/frontend/error_tracking/components/stacktrace_spec.js b/spec/frontend/error_tracking/components/stacktrace_spec.js
new file mode 100644
index 00000000000..4f4a60acba4
--- /dev/null
+++ b/spec/frontend/error_tracking/components/stacktrace_spec.js
@@ -0,0 +1,45 @@
+import { shallowMount } from '@vue/test-utils';
+import Stacktrace from '~/error_tracking/components/stacktrace.vue';
+import StackTraceEntry from '~/error_tracking/components/stacktrace_entry.vue';
+
+describe('ErrorDetails', () => {
+ let wrapper;
+
+ const stackTraceEntry = {
+ filename: 'sidekiq/util.rb',
+ context: [
+ [22, ' def safe_thread(name, \u0026block)\n'],
+ [23, ' Thread.new do\n'],
+ [24, " Thread.current['sidekiq_label'] = name\n"],
+ [25, ' watchdog(name, \u0026block)\n'],
+ ],
+ lineNo: 24,
+ };
+
+ function mountComponent(entries) {
+ wrapper = shallowMount(Stacktrace, {
+ propsData: {
+ entries,
+ },
+ });
+ }
+
+ describe('Stacktrace', () => {
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ it('should render single Stacktrace entry', () => {
+ mountComponent([stackTraceEntry]);
+ expect(wrapper.findAll(StackTraceEntry).length).toBe(1);
+ });
+
+ it('should render multiple Stacktrace entry', () => {
+ const entriesNum = 3;
+ mountComponent(new Array(entriesNum).fill(stackTraceEntry));
+ expect(wrapper.findAll(StackTraceEntry).length).toBe(entriesNum);
+ });
+ });
+});
diff --git a/spec/frontend/error_tracking/store/details/actions_spec.js b/spec/frontend/error_tracking/store/details/actions_spec.js
new file mode 100644
index 00000000000..f72cd1e413b
--- /dev/null
+++ b/spec/frontend/error_tracking/store/details/actions_spec.js
@@ -0,0 +1,94 @@
+import axios from '~/lib/utils/axios_utils';
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import createFlash from '~/flash';
+import * as actions from '~/error_tracking/store/details/actions';
+import * as types from '~/error_tracking/store/details/mutation_types';
+
+jest.mock('~/flash.js');
+let mock;
+
+describe('Sentry error details store actions', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ createFlash.mockClear();
+ });
+
+ describe('startPollingDetails', () => {
+ const endpoint = '123/details';
+ it('should commit SET_ERROR with received response', done => {
+ const payload = { error: { id: 1 } };
+ mock.onGet().reply(200, payload);
+ testAction(
+ actions.startPollingDetails,
+ { endpoint },
+ {},
+ [
+ { type: types.SET_ERROR, payload: payload.error },
+ { type: types.SET_LOADING, payload: false },
+ ],
+ [],
+ () => {
+ done();
+ },
+ );
+ });
+
+ it('should show flash on API error', done => {
+ mock.onGet().reply(400);
+
+ testAction(
+ actions.startPollingDetails,
+ { endpoint },
+ {},
+ [{ type: types.SET_LOADING, payload: false }],
+ [],
+ () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ done();
+ },
+ );
+ });
+ });
+
+ describe('startPollingStacktrace', () => {
+ const endpoint = '123/stacktrace';
+ it('should commit SET_ERROR with received response', done => {
+ const payload = { error: [1, 2, 3] };
+ mock.onGet().reply(200, payload);
+ testAction(
+ actions.startPollingStacktrace,
+ { endpoint },
+ {},
+ [
+ { type: types.SET_STACKTRACE_DATA, payload: payload.error },
+ { type: types.SET_LOADING_STACKTRACE, payload: false },
+ ],
+ [],
+ () => {
+ done();
+ },
+ );
+ });
+
+ it('should show flash on API error', done => {
+ mock.onGet().reply(400);
+
+ testAction(
+ actions.startPollingStacktrace,
+ { endpoint },
+ {},
+ [{ type: types.SET_LOADING_STACKTRACE, payload: false }],
+ [],
+ () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ done();
+ },
+ );
+ });
+ });
+});
diff --git a/spec/frontend/error_tracking/store/details/getters_spec.js b/spec/frontend/error_tracking/store/details/getters_spec.js
new file mode 100644
index 00000000000..ea57de5872b
--- /dev/null
+++ b/spec/frontend/error_tracking/store/details/getters_spec.js
@@ -0,0 +1,13 @@
+import * as getters from '~/error_tracking/store/details/getters';
+
+describe('Sentry error details store getters', () => {
+ const state = {
+ stacktraceData: { stack_trace_entries: [1, 2] },
+ };
+
+ describe('stacktrace', () => {
+ it('should get stacktrace', () => {
+ expect(getters.stacktrace(state)).toEqual([2, 1]);
+ });
+ });
+});
diff --git a/spec/frontend/error_tracking/store/list/getters_spec.js b/spec/frontend/error_tracking/store/list/getters_spec.js
new file mode 100644
index 00000000000..3cd7fa37d44
--- /dev/null
+++ b/spec/frontend/error_tracking/store/list/getters_spec.js
@@ -0,0 +1,33 @@
+import * as getters from '~/error_tracking/store/list/getters';
+
+describe('Error Tracking getters', () => {
+ let state;
+
+ const mockErrors = [
+ { title: 'ActiveModel::MissingAttributeError: missing attribute: encrypted_password' },
+ { title: 'Grape::Exceptions::MethodNotAllowed: Grape::Exceptions::MethodNotAllowed' },
+ { title: 'NoMethodError: undefined method `sanitize_http_headers=' },
+ { title: 'NoMethodError: undefined method `pry' },
+ ];
+
+ beforeEach(() => {
+ state = {
+ errors: mockErrors,
+ };
+ });
+
+ describe('search results', () => {
+ it('should return errors filtered by words in title matching the query', () => {
+ const filteredErrors = getters.filterErrorsByTitle(state)('NoMethod');
+
+ expect(filteredErrors).not.toContainEqual(mockErrors[0]);
+ expect(filteredErrors.length).toBe(2);
+ });
+
+ it('should not return results if there is no matching query', () => {
+ const filteredErrors = getters.filterErrorsByTitle(state)('GitLab');
+
+ expect(filteredErrors.length).toBe(0);
+ });
+ });
+});
diff --git a/spec/frontend/error_tracking/store/mutation_spec.js b/spec/frontend/error_tracking/store/list/mutation_spec.js
index 8117104bdbc..6e021185b4d 100644
--- a/spec/frontend/error_tracking/store/mutation_spec.js
+++ b/spec/frontend/error_tracking/store/list/mutation_spec.js
@@ -1,5 +1,5 @@
-import mutations from '~/error_tracking/store/mutations';
-import * as types from '~/error_tracking/store/mutation_types';
+import mutations from '~/error_tracking/store/list/mutations';
+import * as types from '~/error_tracking/store/list/mutation_types';
describe('Error tracking mutations', () => {
describe('SET_ERRORS', () => {
diff --git a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
index 23e57c4bbf1..bff8ad0877a 100644
--- a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
+++ b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
@@ -1,7 +1,9 @@
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
-import { GlButton, GlFormInput } from '@gitlab/ui';
+import { GlFormInput } from '@gitlab/ui';
+import LoadingButton from '~/vue_shared/components/loading_button.vue';
import ErrorTrackingForm from '~/error_tracking_settings/components/error_tracking_form.vue';
+import createStore from '~/error_tracking_settings/store';
import { defaultProps } from '../mock';
const localVue = createLocalVue();
@@ -9,15 +11,18 @@ localVue.use(Vuex);
describe('error tracking settings form', () => {
let wrapper;
+ let store;
function mountComponent() {
wrapper = shallowMount(ErrorTrackingForm, {
localVue,
+ store,
propsData: defaultProps,
});
}
beforeEach(() => {
+ store = createStore();
mountComponent();
});
@@ -38,7 +43,7 @@ describe('error tracking settings form', () => {
.attributes('id'),
).toBe('error-tracking-token');
- expect(wrapper.findAll(GlButton).exists()).toBe(true);
+ expect(wrapper.findAll(LoadingButton).exists()).toBe(true);
});
it('is rendered with labels and placeholders', () => {
@@ -59,9 +64,21 @@ describe('error tracking settings form', () => {
});
});
+ describe('loading projects', () => {
+ beforeEach(() => {
+ store.state.isLoadingProjects = true;
+ });
+
+ it('shows loading spinner', () => {
+ const { label, loading } = wrapper.find(LoadingButton).props();
+ expect(loading).toBe(true);
+ expect(label).toBe('Connecting');
+ });
+ });
+
describe('after a successful connection', () => {
beforeEach(() => {
- wrapper.setProps({ connectSuccessful: true });
+ store.state.connectSuccessful = true;
});
it('shows the success checkmark', () => {
@@ -77,7 +94,7 @@ describe('error tracking settings form', () => {
describe('after an unsuccessful connection', () => {
beforeEach(() => {
- wrapper.setProps({ connectError: true });
+ store.state.connectError = true;
});
it('does not show the check mark', () => {
diff --git a/spec/frontend/error_tracking_settings/store/actions_spec.js b/spec/frontend/error_tracking_settings/store/actions_spec.js
index 1eab0f7470b..e12c4e20f58 100644
--- a/spec/frontend/error_tracking_settings/store/actions_spec.js
+++ b/spec/frontend/error_tracking_settings/store/actions_spec.js
@@ -69,7 +69,14 @@ describe('error tracking settings actions', () => {
});
it('should request projects correctly', done => {
- testAction(actions.requestProjects, null, state, [{ type: types.RESET_CONNECT }], [], done);
+ testAction(
+ actions.requestProjects,
+ null,
+ state,
+ [{ type: types.SET_PROJECTS_LOADING, payload: true }, { type: types.RESET_CONNECT }],
+ [],
+ done,
+ );
});
it('should receive projects correctly', done => {
@@ -81,6 +88,7 @@ describe('error tracking settings actions', () => {
[
{ type: types.UPDATE_CONNECT_SUCCESS },
{ type: types.RECEIVE_PROJECTS, payload: testPayload },
+ { type: types.SET_PROJECTS_LOADING, payload: false },
],
[],
done,
@@ -93,7 +101,11 @@ describe('error tracking settings actions', () => {
actions.receiveProjectsError,
testPayload,
state,
- [{ type: types.UPDATE_CONNECT_ERROR }, { type: types.CLEAR_PROJECTS }],
+ [
+ { type: types.UPDATE_CONNECT_ERROR },
+ { type: types.CLEAR_PROJECTS },
+ { type: types.SET_PROJECTS_LOADING, payload: false },
+ ],
[],
done,
);
diff --git a/spec/frontend/fixtures/merge_requests.rb b/spec/frontend/fixtures/merge_requests.rb
index 8fbdb534b3d..f20c0aa3540 100644
--- a/spec/frontend/fixtures/merge_requests.rb
+++ b/spec/frontend/fixtures/merge_requests.rb
@@ -8,7 +8,23 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont
let(:admin) { create(:admin) }
let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
let(:project) { create(:project, :repository, namespace: namespace, path: 'merge-requests-project') }
- let(:merge_request) { create(:merge_request, :with_diffs, source_project: project, target_project: project, description: '- [ ] Task List Item') }
+
+ # rubocop: disable Layout/TrailingWhitespace
+ let(:merge_request) do
+ create(
+ :merge_request,
+ :with_diffs,
+ source_project: project,
+ target_project: project,
+ description: <<~MARKDOWN.strip_heredoc
+ - [ ] Task List Item
+ - [ ]
+ - [ ] Task List Item 2
+ MARKDOWN
+ )
+ end
+ # rubocop: enable Layout/TrailingWhitespace
+
let(:merged_merge_request) { create(:merge_request, :merged, source_project: project, target_project: project) }
let(:pipeline) do
create(
diff --git a/spec/frontend/fixtures/static/environments_logs.html b/spec/frontend/fixtures/static/environments_logs.html
index ccf9c364154..88bb0a3ed41 100644
--- a/spec/frontend/fixtures/static/environments_logs.html
+++ b/spec/frontend/fixtures/static/environments_logs.html
@@ -2,8 +2,8 @@
class="js-kubernetes-logs"
data-current-environment-name="production"
data-environments-path="/root/my-project/environments.json"
- data-logs-page="/root/my-project/environments/1/logs"
- data-logs-path="/root/my-project/environments/1/logs.json"
+ data-project-full-path="root/my-project"
+ data-environment-id=1
>
<div class="build-page-pod-logs">
<div class="build-trace-container prepend-top-default">
diff --git a/spec/frontend/fixtures/static/signin_tabs.html b/spec/frontend/fixtures/static/signin_tabs.html
index 7e66ab9394b..247a6b03054 100644
--- a/spec/frontend/fixtures/static/signin_tabs.html
+++ b/spec/frontend/fixtures/static/signin_tabs.html
@@ -5,4 +5,7 @@
<li>
<a href="#login-pane">Standard</a>
</li>
+<li>
+<a href="#register-pane">Register</a>
+</li>
</ul>
diff --git a/spec/frontend/fixtures/u2f.rb b/spec/frontend/fixtures/u2f.rb
index dded6ce6380..9710fbbc181 100644
--- a/spec/frontend/fixtures/u2f.rb
+++ b/spec/frontend/fixtures/u2f.rb
@@ -34,7 +34,9 @@ context 'U2F' do
before do
sign_in(user)
- allow_any_instance_of(Profiles::TwoFactorAuthsController).to receive(:build_qr_code).and_return('qrcode:blackandwhitesquares')
+ allow_next_instance_of(Profiles::TwoFactorAuthsController) do |instance|
+ allow(instance).to receive(:build_qr_code).and_return('qrcode:blackandwhitesquares')
+ end
end
it 'u2f/register.html' do
diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
new file mode 100644
index 00000000000..69ad71a1efb
--- /dev/null
+++ b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
@@ -0,0 +1,101 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`grafana integration component default state to match the default snapshot 1`] = `
+<section
+ class="settings no-animate js-grafana-integration"
+ id="grafana"
+>
+ <div
+ class="settings-header"
+ >
+ <h4
+ class="js-section-header"
+ >
+
+ Grafana Authentication
+
+ </h4>
+
+ <glbutton-stub
+ class="js-settings-toggle"
+ >
+ Expand
+ </glbutton-stub>
+
+ <p
+ class="js-section-sub-header"
+ >
+
+ Embed Grafana charts in GitLab issues.
+
+ </p>
+ </div>
+
+ <div
+ class="settings-content"
+ >
+ <form>
+ <glformcheckbox-stub
+ class="mb-4"
+ id="grafana-integration-enabled"
+ >
+
+ Active
+
+ </glformcheckbox-stub>
+
+ <glformgroup-stub
+ description="Enter the base URL of the Grafana instance."
+ label="Grafana URL"
+ label-for="grafana-url"
+ >
+ <glforminput-stub
+ id="grafana-url"
+ placeholder="https://my-url.grafana.net/"
+ value="http://test.host"
+ />
+ </glformgroup-stub>
+
+ <glformgroup-stub
+ label="API Token"
+ label-for="grafana-token"
+ >
+ <glforminput-stub
+ id="grafana-token"
+ value="someToken"
+ />
+
+ <p
+ class="form-text text-muted"
+ >
+
+ Enter the Grafana API Token.
+
+ <a
+ href="https://grafana.com/docs/http_api/auth/#create-api-token"
+ rel="noopener noreferrer"
+ target="_blank"
+ >
+
+ More information
+
+ <icon-stub
+ class="vertical-align-middle"
+ name="external-link"
+ size="16"
+ />
+ </a>
+ </p>
+ </glformgroup-stub>
+
+ <glbutton-stub
+ variant="success"
+ >
+
+ Save Changes
+
+ </glbutton-stub>
+ </form>
+ </div>
+</section>
+`;
diff --git a/spec/frontend/grafana_integration/components/grafana_integration_spec.js b/spec/frontend/grafana_integration/components/grafana_integration_spec.js
new file mode 100644
index 00000000000..c098ada0519
--- /dev/null
+++ b/spec/frontend/grafana_integration/components/grafana_integration_spec.js
@@ -0,0 +1,125 @@
+import { mount, shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import GrafanaIntegration from '~/grafana_integration/components/grafana_integration.vue';
+import { createStore } from '~/grafana_integration/store';
+import axios from '~/lib/utils/axios_utils';
+import { refreshCurrentPage } from '~/lib/utils/url_utility';
+import createFlash from '~/flash';
+import { TEST_HOST } from 'helpers/test_constants';
+
+jest.mock('~/lib/utils/url_utility');
+jest.mock('~/flash');
+
+describe('grafana integration component', () => {
+ let wrapper;
+ let store;
+ const operationsSettingsEndpoint = `${TEST_HOST}/mock/ops/settings/endpoint`;
+ const grafanaIntegrationUrl = `${TEST_HOST}`;
+ const grafanaIntegrationToken = 'someToken';
+
+ beforeEach(() => {
+ store = createStore({
+ operationsSettingsEndpoint,
+ grafanaIntegrationUrl,
+ grafanaIntegrationToken,
+ });
+ });
+
+ afterEach(() => {
+ if (wrapper.destroy) {
+ wrapper.destroy();
+ createFlash.mockReset();
+ refreshCurrentPage.mockReset();
+ }
+ });
+
+ describe('default state', () => {
+ it('to match the default snapshot', () => {
+ wrapper = shallowMount(GrafanaIntegration, { store });
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+
+ it('renders header text', () => {
+ wrapper = shallowMount(GrafanaIntegration, { store });
+
+ expect(wrapper.find('.js-section-header').text()).toBe('Grafana Authentication');
+ });
+
+ describe('expand/collapse button', () => {
+ it('renders as an expand button by default', () => {
+ wrapper = shallowMount(GrafanaIntegration, { store });
+
+ const button = wrapper.find(GlButton);
+
+ expect(button.text()).toBe('Expand');
+ });
+ });
+
+ describe('sub-header', () => {
+ it('renders descriptive text', () => {
+ wrapper = shallowMount(GrafanaIntegration, { store });
+
+ expect(wrapper.find('.js-section-sub-header').text()).toContain(
+ 'Embed Grafana charts in GitLab issues.',
+ );
+ });
+ });
+
+ describe('form', () => {
+ beforeEach(() => {
+ jest.spyOn(axios, 'patch').mockImplementation();
+ });
+
+ afterEach(() => {
+ axios.patch.mockReset();
+ });
+
+ describe('submit button', () => {
+ const findSubmitButton = () => wrapper.find('.settings-content form').find(GlButton);
+
+ const endpointRequest = [
+ operationsSettingsEndpoint,
+ {
+ project: {
+ grafana_integration_attributes: {
+ grafana_url: grafanaIntegrationUrl,
+ token: grafanaIntegrationToken,
+ enabled: false,
+ },
+ },
+ },
+ ];
+
+ it('submits form on click', () => {
+ wrapper = mount(GrafanaIntegration, { store });
+ axios.patch.mockResolvedValue();
+
+ findSubmitButton(wrapper).trigger('click');
+
+ expect(axios.patch).toHaveBeenCalledWith(...endpointRequest);
+ return wrapper.vm.$nextTick().then(() => expect(refreshCurrentPage).toHaveBeenCalled());
+ });
+
+ it('creates flash banner on error', () => {
+ const message = 'mockErrorMessage';
+ wrapper = mount(GrafanaIntegration, { store });
+ axios.patch.mockRejectedValue({ response: { data: { message } } });
+
+ findSubmitButton().trigger('click');
+
+ expect(axios.patch).toHaveBeenCalledWith(...endpointRequest);
+ return wrapper.vm
+ .$nextTick()
+ .then(jest.runAllTicks)
+ .then(() =>
+ expect(createFlash).toHaveBeenCalledWith(
+ `There was an error saving your changes. ${message}`,
+ 'alert',
+ ),
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/grafana_integration/store/mutations_spec.js b/spec/frontend/grafana_integration/store/mutations_spec.js
new file mode 100644
index 00000000000..18e87394189
--- /dev/null
+++ b/spec/frontend/grafana_integration/store/mutations_spec.js
@@ -0,0 +1,35 @@
+import mutations from '~/grafana_integration/store/mutations';
+import createState from '~/grafana_integration/store/state';
+
+describe('grafana integration mutations', () => {
+ let localState;
+
+ beforeEach(() => {
+ localState = createState();
+ });
+
+ describe('SET_GRAFANA_URL', () => {
+ it('sets grafanaUrl', () => {
+ const mockUrl = 'mockUrl';
+ mutations.SET_GRAFANA_URL(localState, mockUrl);
+
+ expect(localState.grafanaUrl).toBe(mockUrl);
+ });
+ });
+
+ describe('SET_GRAFANA_TOKEN', () => {
+ it('sets grafanaToken', () => {
+ const mockToken = 'mockToken';
+ mutations.SET_GRAFANA_TOKEN(localState, mockToken);
+
+ expect(localState.grafanaToken).toBe(mockToken);
+ });
+ });
+ describe('SET_GRAFANA_ENABLED', () => {
+ it('updates grafanaEnabled for integration', () => {
+ mutations.SET_GRAFANA_ENABLED(localState, true);
+
+ expect(localState.grafanaEnabled).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/helpers/monitor_helper_spec.js b/spec/frontend/helpers/monitor_helper_spec.js
index 2e8bff298c4..0798ca580e2 100644
--- a/spec/frontend/helpers/monitor_helper_spec.js
+++ b/spec/frontend/helpers/monitor_helper_spec.js
@@ -41,5 +41,87 @@ describe('monitor helper', () => {
),
).toEqual([{ ...expectedDataSeries[0], data: [[1, 1]] }]);
});
+
+ it('updates series name from templates', () => {
+ const config = {
+ ...defaultConfig,
+ name: '{{cmd}}',
+ };
+
+ const [result] = monitorHelper.makeDataSeries(
+ [{ metric: { cmd: 'brpop' }, values: series }],
+ config,
+ );
+
+ expect(result.name).toEqual('brpop');
+ });
+
+ it('supports space-padded template expressions', () => {
+ const config = {
+ ...defaultConfig,
+ name: 'backend: {{ backend }}',
+ };
+
+ const [result] = monitorHelper.makeDataSeries(
+ [{ metric: { backend: 'HA Server' }, values: series }],
+ config,
+ );
+
+ expect(result.name).toEqual('backend: HA Server');
+ });
+
+ it('supports repeated template variables', () => {
+ const config = { ...defaultConfig, name: '{{cmd}}, {{cmd}}' };
+
+ const [result] = monitorHelper.makeDataSeries(
+ [{ metric: { cmd: 'brpop' }, values: series }],
+ config,
+ );
+
+ expect(result.name).toEqual('brpop, brpop');
+ });
+
+ it('supports hyphenated template variables', () => {
+ const config = { ...defaultConfig, name: 'expired - {{ test-attribute }}' };
+
+ const [result] = monitorHelper.makeDataSeries(
+ [{ metric: { 'test-attribute': 'test-attribute-value' }, values: series }],
+ config,
+ );
+
+ expect(result.name).toEqual('expired - test-attribute-value');
+ });
+
+ it('updates multiple series names from templates', () => {
+ const config = {
+ ...defaultConfig,
+ name: '{{job}}: {{cmd}}',
+ };
+
+ const [result] = monitorHelper.makeDataSeries(
+ [{ metric: { cmd: 'brpop', job: 'redis' }, values: series }],
+ config,
+ );
+
+ expect(result.name).toEqual('redis: brpop');
+ });
+
+ it('updates name for each series', () => {
+ const config = {
+ ...defaultConfig,
+ name: '{{cmd}}',
+ };
+
+ const [firstSeries, secondSeries] = monitorHelper.makeDataSeries(
+ [
+ { metric: { cmd: 'brpop' }, values: series },
+ { metric: { cmd: 'zrangebyscore' }, values: series },
+ ],
+ config,
+ );
+
+ expect(firstSeries.name).toEqual('brpop');
+ expect(secondSeries.name).toEqual('zrangebyscore');
+ });
});
});
diff --git a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
new file mode 100644
index 00000000000..5d6c31f01d9
--- /dev/null
+++ b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
@@ -0,0 +1,61 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`IDE pipeline stage renders stage details & icon 1`] = `
+<div
+ class="ide-stage card prepend-top-default"
+>
+ <div
+ class="card-header"
+ >
+ <ciicon-stub
+ cssclasses=""
+ size="24"
+ status="[object Object]"
+ />
+
+ <strong
+ class="prepend-left-8 ide-stage-title"
+ data-container="body"
+ data-original-title=""
+ title=""
+ >
+
+ build
+
+ </strong>
+
+ <div
+ class="append-right-8 prepend-left-4"
+ >
+ <span
+ class="badge badge-pill"
+ >
+ 4
+ </span>
+ </div>
+
+ <icon-stub
+ class="ide-stage-collapse-icon"
+ name="angle-down"
+ size="16"
+ />
+ </div>
+
+ <div
+ class="card-body"
+ >
+ <item-stub
+ job="[object Object]"
+ />
+ <item-stub
+ job="[object Object]"
+ />
+ <item-stub
+ job="[object Object]"
+ />
+ <item-stub
+ job="[object Object]"
+ />
+ </div>
+</div>
+`;
diff --git a/spec/frontend/ide/components/jobs/stage_spec.js b/spec/frontend/ide/components/jobs/stage_spec.js
new file mode 100644
index 00000000000..2e42ab26d27
--- /dev/null
+++ b/spec/frontend/ide/components/jobs/stage_spec.js
@@ -0,0 +1,86 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import Stage from '~/ide/components/jobs/stage.vue';
+import Item from '~/ide/components/jobs/item.vue';
+import { stages, jobs } from '../../mock_data';
+
+describe('IDE pipeline stage', () => {
+ let wrapper;
+ const defaultProps = {
+ stage: {
+ ...stages[0],
+ id: 0,
+ dropdownPath: stages[0].dropdown_path,
+ jobs: [...jobs],
+ isLoading: false,
+ isCollapsed: false,
+ },
+ };
+
+ const findHeader = () => wrapper.find({ ref: 'cardHeader' });
+ const findJobList = () => wrapper.find({ ref: 'jobList' });
+
+ const createComponent = props => {
+ wrapper = shallowMount(Stage, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ sync: false,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('emits fetch event when mounted', () => {
+ createComponent();
+ expect(wrapper.emitted().fetch).toBeDefined();
+ });
+
+ it('renders loading icon when no jobs and isLoading is true', () => {
+ createComponent({
+ stage: { ...defaultProps.stage, isLoading: true, jobs: [] },
+ });
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+
+  it('emits toggleCollapsed event with stage id when clicking header', () => {
+ const id = 5;
+ createComponent({ stage: { ...defaultProps.stage, id } });
+ findHeader().trigger('click');
+ expect(wrapper.emitted().toggleCollapsed[0][0]).toBe(id);
+ });
+
+  it('emits clickViewLog event with job', () => {
+ const [job] = defaultProps.stage.jobs;
+ createComponent();
+ wrapper
+ .findAll(Item)
+ .at(0)
+ .vm.$emit('clickViewLog', job);
+ expect(wrapper.emitted().clickViewLog[0][0]).toBe(job);
+ });
+
+ it('renders stage details & icon', () => {
+ createComponent();
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('when collapsed', () => {
+ beforeEach(() => {
+ createComponent({ stage: { ...defaultProps.stage, isCollapsed: true } });
+ });
+
+ it('does not render job list', () => {
+ expect(findJobList().isVisible()).toBe(false);
+ });
+
+ it('sets border bottom class', () => {
+ expect(findHeader().classes('border-bottom-0')).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/ide/components/preview/clientside_spec.js b/spec/frontend/ide/components/preview/clientside_spec.js
index dfc76628d0c..6a33f4998c5 100644
--- a/spec/frontend/ide/components/preview/clientside_spec.js
+++ b/spec/frontend/ide/components/preview/clientside_spec.js
@@ -24,6 +24,9 @@ describe('IDE clientside preview', () => {
getFileData: jest.fn().mockReturnValue(Promise.resolve({})),
getRawFileData: jest.fn().mockReturnValue(Promise.resolve('')),
};
+ const storeClientsideActions = {
+ pingUsage: jest.fn().mockReturnValue(Promise.resolve({})),
+ };
const waitForCalls = () => new Promise(setImmediate);
@@ -42,6 +45,12 @@ describe('IDE clientside preview', () => {
...getters,
},
actions: storeActions,
+ modules: {
+ clientside: {
+ namespaced: true,
+ actions: storeClientsideActions,
+ },
+ },
});
wrapper = shallowMount(Clientside, {
@@ -76,7 +85,8 @@ describe('IDE clientside preview', () => {
describe('with main entry', () => {
beforeEach(() => {
createComponent({ getters: { packageJson: dummyPackageJson } });
- return wrapper.vm.initPreview();
+
+ return waitForCalls();
});
it('creates sandpack manager', () => {
@@ -95,6 +105,10 @@ describe('IDE clientside preview', () => {
},
);
});
+
+ it('pings usage', () => {
+ expect(storeClientsideActions.pingUsage).toHaveBeenCalledTimes(1);
+ });
});
describe('computed', () => {
@@ -178,13 +192,13 @@ describe('IDE clientside preview', () => {
});
describe('showOpenInCodeSandbox', () => {
- it('returns true when visiblity is public', () => {
+ it('returns true when visibility is public', () => {
createComponent({ getters: { currentProject: () => ({ visibility: 'public' }) } });
expect(wrapper.vm.showOpenInCodeSandbox).toBe(true);
});
- it('returns false when visiblity is private', () => {
+ it('returns false when visibility is private', () => {
createComponent({ getters: { currentProject: () => ({ visibility: 'private' }) } });
expect(wrapper.vm.showOpenInCodeSandbox).toBe(false);
diff --git a/spec/frontend/ide/services/index_spec.js b/spec/frontend/ide/services/index_spec.js
index 3d5ed4b5c0c..bb0d20bed91 100644
--- a/spec/frontend/ide/services/index_spec.js
+++ b/spec/frontend/ide/services/index_spec.js
@@ -1,11 +1,18 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
import services from '~/ide/services';
import Api from '~/api';
+import { escapeFileUrl } from '~/ide/stores/utils';
jest.mock('~/api');
const TEST_PROJECT_ID = 'alice/wonderland';
const TEST_BRANCH = 'master-patch-123';
const TEST_COMMIT_SHA = '123456789';
+const TEST_FILE_PATH = 'README2.md';
+const TEST_FILE_OLD_PATH = 'OLD_README2.md';
+const TEST_FILE_PATH_SPECIAL = 'READM?ME/abc';
+const TEST_FILE_CONTENTS = 'raw file content';
describe('IDE services', () => {
describe('commit', () => {
@@ -28,4 +35,80 @@ describe('IDE services', () => {
expect(Api.commitMultiple).toHaveBeenCalledWith(TEST_PROJECT_ID, payload);
});
});
+
+ describe('getBaseRawFileData', () => {
+ let file;
+ let mock;
+
+ beforeEach(() => {
+ file = {
+ mrChange: null,
+ projectId: TEST_PROJECT_ID,
+ path: TEST_FILE_PATH,
+ };
+
+ jest.spyOn(axios, 'get');
+
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('gives back file.baseRaw for files with that property present', () => {
+ file.baseRaw = TEST_FILE_CONTENTS;
+
+ return services.getBaseRawFileData(file, TEST_COMMIT_SHA).then(content => {
+ expect(content).toEqual(TEST_FILE_CONTENTS);
+ });
+ });
+
+  it('gives back file.baseRaw for temp files', () => {
+ file.tempFile = true;
+ file.baseRaw = TEST_FILE_CONTENTS;
+
+ return services.getBaseRawFileData(file, TEST_COMMIT_SHA).then(content => {
+ expect(content).toEqual(TEST_FILE_CONTENTS);
+ });
+ });
+
+ describe.each`
+ relativeUrlRoot | filePath | isRenamed
+ ${''} | ${TEST_FILE_PATH} | ${false}
+ ${''} | ${TEST_FILE_OLD_PATH} | ${true}
+ ${''} | ${TEST_FILE_PATH_SPECIAL} | ${false}
+ ${''} | ${TEST_FILE_PATH_SPECIAL} | ${true}
+ ${'gitlab'} | ${TEST_FILE_OLD_PATH} | ${true}
+ `(
+ 'with relativeUrlRoot ($relativeUrlRoot) and filePath ($filePath) and isRenamed ($isRenamed)',
+ ({ relativeUrlRoot, filePath, isRenamed }) => {
+ beforeEach(() => {
+ if (isRenamed) {
+ file.mrChange = {
+ renamed_file: true,
+ old_path: filePath,
+ };
+ } else {
+ file.path = filePath;
+ }
+
+ gon.relative_url_root = relativeUrlRoot;
+
+ mock
+ .onGet(
+ `${relativeUrlRoot}/${TEST_PROJECT_ID}/raw/${TEST_COMMIT_SHA}/${escapeFileUrl(
+ filePath,
+ )}`,
+ )
+ .reply(200, TEST_FILE_CONTENTS);
+ });
+
+ it('fetches file content', () =>
+ services.getBaseRawFileData(file, TEST_COMMIT_SHA).then(content => {
+ expect(content).toEqual(TEST_FILE_CONTENTS);
+ }));
+ },
+ );
+ });
});
diff --git a/spec/frontend/ide/stores/modules/clientside/actions_spec.js b/spec/frontend/ide/stores/modules/clientside/actions_spec.js
new file mode 100644
index 00000000000..a47bc0bd711
--- /dev/null
+++ b/spec/frontend/ide/stores/modules/clientside/actions_spec.js
@@ -0,0 +1,39 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import { TEST_HOST } from 'helpers/test_constants';
+import axios from '~/lib/utils/axios_utils';
+import * as actions from '~/ide/stores/modules/clientside/actions';
+
+const TEST_PROJECT_URL = `${TEST_HOST}/lorem/ipsum`;
+const TEST_USAGE_URL = `${TEST_PROJECT_URL}/usage_ping/web_ide_clientside_preview`;
+
+describe('IDE store module clientside actions', () => {
+ let rootGetters;
+ let mock;
+
+ beforeEach(() => {
+ rootGetters = {
+ currentProject: {
+ web_url: TEST_PROJECT_URL,
+ },
+ };
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('pingUsage', () => {
+ it('posts to usage endpoint', done => {
+ const usageSpy = jest.fn(() => [200]);
+
+ mock.onPost(TEST_USAGE_URL).reply(() => usageSpy());
+
+ testAction(actions.pingUsage, null, rootGetters, [], [], () => {
+ expect(usageSpy).toHaveBeenCalled();
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap b/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap
new file mode 100644
index 00000000000..f57391a6b0d
--- /dev/null
+++ b/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap
@@ -0,0 +1,15 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Issuables list component with empty issues response with all state should display a catch-all if there are no issues to show 1`] = `
+<glemptystate-stub
+ description="The Issue Tracker is the place to add things that need to be improved or solved in a project. You can register or sign in to create issues for this project."
+ svgpath="/emptySvg"
+ title="There are no issues to show"
+/>
+`;
+
+exports[`Issuables list component with empty issues response with closed state should display a message "There are no closed issues" if there are no closed issues 1`] = `"There are no closed issues"`;
+
+exports[`Issuables list component with empty issues response with empty query should display the message "There are no open issues" 1`] = `"There are no open issues"`;
+
+exports[`Issuables list component with empty issues response with query in window location should display "Sorry, your filter produced no results" if filters are too specific 1`] = `"Sorry, your filter produced no results"`;
diff --git a/spec/frontend/issuables_list/components/issuable_spec.js b/spec/frontend/issuables_list/components/issuable_spec.js
new file mode 100644
index 00000000000..6148f3c68f2
--- /dev/null
+++ b/spec/frontend/issuables_list/components/issuable_spec.js
@@ -0,0 +1,345 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLink } from '@gitlab/ui';
+import { TEST_HOST } from 'helpers/test_constants';
+import { trimText } from 'helpers/text_helper';
+import initUserPopovers from '~/user_popovers';
+import { formatDate } from '~/lib/utils/datetime_utility';
+import { mergeUrlParams } from '~/lib/utils/url_utility';
+import Issuable from '~/issuables_list/components/issuable.vue';
+import IssueAssignees from '~/vue_shared/components/issue/issue_assignees.vue';
+import { simpleIssue, testAssignees, testLabels } from '../issuable_list_test_data';
+
+jest.mock('~/user_popovers');
+
+const TEST_NOW = '2019-08-28T20:03:04.713Z';
+const TEST_MONTH_AGO = '2019-07-28';
+const TEST_MONTH_LATER = '2019-09-30';
+const DATE_FORMAT = 'mmm d, yyyy';
+const TEST_USER_NAME = 'Tyler Durden';
+const TEST_BASE_URL = `${TEST_HOST}/issues`;
+const TEST_TASK_STATUS = '50 of 100 tasks completed';
+const TEST_MILESTONE = {
+ title: 'Milestone title',
+ web_url: `${TEST_HOST}/milestone/1`,
+};
+const TEXT_CLOSED = 'CLOSED';
+const TEST_META_COUNT = 100;
+
+// Use FixedDate so that time sensitive info in snapshots don't fail
+class FixedDate extends Date {
+ constructor(date = TEST_NOW) {
+ super(date);
+ }
+}
+
+describe('Issuable component', () => {
+ let issuable;
+ let DateOrig;
+ let wrapper;
+
+ const factory = (props = {}) => {
+ wrapper = shallowMount(Issuable, {
+ propsData: {
+ issuable: simpleIssue,
+ baseUrl: TEST_BASE_URL,
+ ...props,
+ },
+ sync: false,
+ });
+ };
+
+ beforeEach(() => {
+ issuable = { ...simpleIssue };
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ beforeAll(() => {
+ DateOrig = window.Date;
+ window.Date = FixedDate;
+ });
+
+ afterAll(() => {
+ window.Date = DateOrig;
+ });
+
+ const findConfidentialIcon = () => wrapper.find('.fa-eye-slash');
+ const findTaskStatus = () => wrapper.find('.task-status');
+ const findOpenedAgoContainer = () => wrapper.find({ ref: 'openedAgoByContainer' });
+ const findMilestone = () => wrapper.find('.js-milestone');
+ const findMilestoneTooltip = () => findMilestone().attributes('data-original-title');
+ const findDueDate = () => wrapper.find('.js-due-date');
+ const findLabelContainer = () => wrapper.find('.js-labels');
+ const findLabelLinks = () => findLabelContainer().findAll(GlLink);
+ const findWeight = () => wrapper.find('.js-weight');
+ const findAssignees = () => wrapper.find(IssueAssignees);
+ const findMergeRequestsCount = () => wrapper.find('.js-merge-requests');
+ const findUpvotes = () => wrapper.find('.js-upvotes');
+ const findDownvotes = () => wrapper.find('.js-downvotes');
+ const findNotes = () => wrapper.find('.js-notes');
+ const findBulkCheckbox = () => wrapper.find('input.selected-issuable');
+
+ describe('when mounted', () => {
+ it('initializes user popovers', () => {
+ expect(initUserPopovers).not.toHaveBeenCalled();
+
+ factory();
+
+ expect(initUserPopovers).toHaveBeenCalledWith([findOpenedAgoContainer().find('a').element]);
+ });
+ });
+
+ describe('with simple issuable', () => {
+ beforeEach(() => {
+ Object.assign(issuable, {
+ has_tasks: false,
+ task_status: TEST_TASK_STATUS,
+ created_at: TEST_MONTH_AGO,
+ author: {
+ ...issuable.author,
+ name: TEST_USER_NAME,
+ },
+ labels: [],
+ });
+
+ factory({ issuable });
+ });
+
+ it.each`
+ desc | finder
+ ${'bulk editing checkbox'} | ${findBulkCheckbox}
+ ${'confidential icon'} | ${findConfidentialIcon}
+ ${'task status'} | ${findTaskStatus}
+ ${'milestone'} | ${findMilestone}
+ ${'due date'} | ${findDueDate}
+ ${'labels'} | ${findLabelContainer}
+ ${'weight'} | ${findWeight}
+ ${'merge request count'} | ${findMergeRequestsCount}
+ ${'upvotes'} | ${findUpvotes}
+ ${'downvotes'} | ${findDownvotes}
+ `('does not render $desc', ({ finder }) => {
+ expect(finder().exists()).toBe(false);
+ });
+
+ it('does not have closed text', () => {
+ expect(wrapper.text()).not.toContain(TEXT_CLOSED);
+ });
+
+ it('does not have closed class', () => {
+ expect(wrapper.classes('closed')).toBe(false);
+ });
+
+ it('renders fuzzy opened date and author', () => {
+ expect(trimText(findOpenedAgoContainer().text())).toEqual(
+ `opened 1 month ago by ${TEST_USER_NAME}`,
+ );
+ });
+
+ it('renders no comments', () => {
+ expect(findNotes().classes('no-comments')).toBe(true);
+ });
+ });
+
+ describe('with confidential issuable', () => {
+ beforeEach(() => {
+ issuable.confidential = true;
+
+ factory({ issuable });
+ });
+
+ it('renders the confidential icon', () => {
+ expect(findConfidentialIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('with task status', () => {
+ beforeEach(() => {
+ Object.assign(issuable, {
+ has_tasks: true,
+ task_status: TEST_TASK_STATUS,
+ });
+
+ factory({ issuable });
+ });
+
+ it('renders task status', () => {
+ expect(findTaskStatus().exists()).toBe(true);
+ expect(findTaskStatus().text()).toBe(TEST_TASK_STATUS);
+ });
+ });
+
+ describe.each`
+ desc | dueDate | expectedTooltipPart
+ ${'past due'} | ${TEST_MONTH_AGO} | ${'Past due'}
+ ${'future due'} | ${TEST_MONTH_LATER} | ${'1 month remaining'}
+ `('with milestone with $desc', ({ dueDate, expectedTooltipPart }) => {
+ beforeEach(() => {
+ issuable.milestone = { ...TEST_MILESTONE, due_date: dueDate };
+
+ factory({ issuable });
+ });
+
+ it('renders milestone', () => {
+ expect(findMilestone().exists()).toBe(true);
+ expect(
+ findMilestone()
+ .find('.fa-clock-o')
+ .exists(),
+ ).toBe(true);
+ expect(findMilestone().text()).toEqual(TEST_MILESTONE.title);
+ });
+
+ it('renders tooltip', () => {
+ expect(findMilestoneTooltip()).toBe(
+ `${formatDate(dueDate, DATE_FORMAT)} (${expectedTooltipPart})`,
+ );
+ });
+
+ it('renders milestone with the correct href', () => {
+ const { title } = issuable.milestone;
+ const expected = mergeUrlParams({ milestone_title: title }, TEST_BASE_URL);
+
+ expect(findMilestone().attributes('href')).toBe(expected);
+ });
+ });
+
+ describe.each`
+ dueDate | hasClass | desc
+ ${TEST_MONTH_LATER} | ${false} | ${'with future due date'}
+ ${TEST_MONTH_AGO} | ${true} | ${'with past due date'}
+ `('$desc', ({ dueDate, hasClass }) => {
+ beforeEach(() => {
+ issuable.due_date = dueDate;
+
+ factory({ issuable });
+ });
+
+ it('renders due date', () => {
+ expect(findDueDate().exists()).toBe(true);
+ expect(findDueDate().text()).toBe(formatDate(dueDate, DATE_FORMAT));
+ });
+
+ it(hasClass ? 'has cred class' : 'does not have cred class', () => {
+ expect(findDueDate().classes('cred')).toEqual(hasClass);
+ });
+ });
+
+ describe('with labels', () => {
+ beforeEach(() => {
+ issuable.labels = [...testLabels];
+
+ factory({ issuable });
+ });
+
+ it('renders labels', () => {
+ factory({ issuable });
+
+ const labels = findLabelLinks().wrappers.map(label => ({
+ href: label.attributes('href'),
+ text: label.text(),
+ tooltip: label.find('span').attributes('data-original-title'),
+ }));
+
+ const expected = testLabels.map(label => ({
+ href: mergeUrlParams({ 'label_name[]': label.name }, TEST_BASE_URL),
+ text: label.name,
+ tooltip: label.description,
+ }));
+
+ expect(labels).toEqual(expected);
+ });
+ });
+
+ describe.each`
+ weight
+ ${0}
+ ${10}
+ ${12345}
+ `('with weight $weight', ({ weight }) => {
+ beforeEach(() => {
+ issuable.weight = weight;
+
+ factory({ issuable });
+ });
+
+ it('renders weight', () => {
+ expect(findWeight().exists()).toBe(true);
+ expect(findWeight().text()).toEqual(weight.toString());
+ });
+ });
+
+ describe('with closed state', () => {
+ beforeEach(() => {
+ issuable.state = 'closed';
+
+ factory({ issuable });
+ });
+
+ it('renders closed text', () => {
+ expect(wrapper.text()).toContain(TEXT_CLOSED);
+ });
+
+ it('has closed class', () => {
+ expect(wrapper.classes('closed')).toBe(true);
+ });
+ });
+
+ describe('with assignees', () => {
+ beforeEach(() => {
+ issuable.assignees = testAssignees;
+
+ factory({ issuable });
+ });
+
+ it('renders assignees', () => {
+ expect(findAssignees().exists()).toBe(true);
+ expect(findAssignees().props('assignees')).toEqual(testAssignees);
+ });
+ });
+
+ describe.each`
+ desc | key | finder
+ ${'with merge requests count'} | ${'merge_requests_count'} | ${findMergeRequestsCount}
+ ${'with upvote count'} | ${'upvotes'} | ${findUpvotes}
+ ${'with downvote count'} | ${'downvotes'} | ${findDownvotes}
+ ${'with notes count'} | ${'user_notes_count'} | ${findNotes}
+ `('$desc', ({ key, finder }) => {
+ beforeEach(() => {
+ issuable[key] = TEST_META_COUNT;
+
+ factory({ issuable });
+ });
+
+ it('renders merge requests count', () => {
+ expect(finder().exists()).toBe(true);
+ expect(finder().text()).toBe(TEST_META_COUNT.toString());
+ expect(finder().classes('no-comments')).toBe(false);
+ });
+ });
+
+ describe('with bulk editing', () => {
+ describe.each`
+ selected | desc
+ ${true} | ${'when selected'}
+ ${false} | ${'when unselected'}
+ `('$desc', ({ selected }) => {
+ beforeEach(() => {
+ factory({ isBulkEditing: true, selected });
+ });
+
+ it(`renders checked is ${selected}`, () => {
+ expect(findBulkCheckbox().element.checked).toBe(selected);
+ });
+
+ it('emits select when clicked', () => {
+ expect(wrapper.emitted().select).toBeUndefined();
+
+ findBulkCheckbox().trigger('click');
+
+ expect(wrapper.emitted().select).toEqual([[{ issuable, selected: !selected }]]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issuables_list/components/issuables_list_app_spec.js b/spec/frontend/issuables_list/components/issuables_list_app_spec.js
new file mode 100644
index 00000000000..e598a9c5a5d
--- /dev/null
+++ b/spec/frontend/issuables_list/components/issuables_list_app_spec.js
@@ -0,0 +1,410 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlEmptyState, GlPagination, GlSkeletonLoading } from '@gitlab/ui';
+import flash from '~/flash';
+import waitForPromises from 'helpers/wait_for_promises';
+import { TEST_HOST } from 'helpers/test_constants';
+import IssuablesListApp from '~/issuables_list/components/issuables_list_app.vue';
+import Issuable from '~/issuables_list/components/issuable.vue';
+import issueablesEventBus from '~/issuables_list/eventhub';
+import { PAGE_SIZE, PAGE_SIZE_MANUAL, RELATIVE_POSITION } from '~/issuables_list/constants';
+
+jest.mock('~/flash', () => jest.fn());
+jest.mock('~/issuables_list/eventhub');
+
+const TEST_LOCATION = `${TEST_HOST}/issues`;
+const TEST_ENDPOINT = '/issues';
+const TEST_CREATE_ISSUES_PATH = '/createIssue';
+const TEST_EMPTY_SVG_PATH = '/emptySvg';
+
+const localVue = createLocalVue();
+
+const MOCK_ISSUES = Array(PAGE_SIZE_MANUAL)
+ .fill(0)
+ .map((_, i) => ({
+ id: i,
+ web_url: `url${i}`,
+ }));
+
+describe('Issuables list component', () => {
+ let oldLocation;
+ let mockAxios;
+ let wrapper;
+ let apiSpy;
+
+ const setupApiMock = cb => {
+ apiSpy = jest.fn(cb);
+
+ mockAxios.onGet(TEST_ENDPOINT).reply(cfg => apiSpy(cfg));
+ };
+
+ const factory = (props = { sortKey: 'priority' }) => {
+ wrapper = shallowMount(localVue.extend(IssuablesListApp), {
+ propsData: {
+ endpoint: TEST_ENDPOINT,
+ createIssuePath: TEST_CREATE_ISSUES_PATH,
+ emptySvgPath: TEST_EMPTY_SVG_PATH,
+ ...props,
+ },
+ localVue,
+ sync: false,
+ });
+ };
+
+ const findLoading = () => wrapper.find(GlSkeletonLoading);
+ const findIssuables = () => wrapper.findAll(Issuable);
+ const findFirstIssuable = () => findIssuables().wrappers[0];
+ const findEmptyState = () => wrapper.find(GlEmptyState);
+
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+
+ oldLocation = window.location;
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: { href: '', search: '' },
+ });
+ window.location.href = TEST_LOCATION;
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mockAxios.restore();
+ jest.clearAllMocks();
+ window.location = oldLocation;
+ });
+
+ describe('with failed issues response', () => {
+ beforeEach(() => {
+ setupApiMock(() => [500]);
+
+ factory();
+
+ return waitForPromises();
+ });
+
+ it('does not show loading', () => {
+ expect(wrapper.vm.loading).toBe(false);
+ });
+
+ it('flashes an error', () => {
+ expect(flash).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('with successful issues response', () => {
+ beforeEach(() => {
+ setupApiMock(() => [
+ 200,
+ MOCK_ISSUES.slice(0, PAGE_SIZE),
+ {
+ 'x-total': 100,
+ 'x-page': 2,
+ },
+ ]);
+ });
+
+ it('has default props and data', () => {
+ factory();
+ expect(wrapper.vm).toMatchObject({
+ // Props
+ canBulkEdit: false,
+ createIssuePath: TEST_CREATE_ISSUES_PATH,
+ emptySvgPath: TEST_EMPTY_SVG_PATH,
+
+ // Data
+ filters: {
+ state: 'opened',
+ },
+ isBulkEditing: false,
+ issuables: [],
+ loading: true,
+ page: 1,
+ selection: {},
+ totalItems: 0,
+ });
+ });
+
+ it('does not call API until mounted', () => {
+ expect(apiSpy).not.toHaveBeenCalled();
+ });
+
+ describe('when mounted', () => {
+ beforeEach(() => {
+ factory();
+ });
+
+ it('calls API', () => {
+ expect(apiSpy).toHaveBeenCalled();
+ });
+
+ it('shows loading', () => {
+ expect(findLoading().exists()).toBe(true);
+ expect(findIssuables().length).toBe(0);
+ expect(findEmptyState().exists()).toBe(false);
+ });
+ });
+
+ describe('when finished loading', () => {
+ beforeEach(() => {
+ factory();
+
+ return waitForPromises();
+ });
+
+ it('does not display empty state', () => {
+ expect(wrapper.vm.issuables.length).toBeGreaterThan(0);
+ expect(wrapper.vm.emptyState).toEqual({});
+ expect(wrapper.contains(GlEmptyState)).toBe(false);
+ });
+
+ it('sets the proper page and total items', () => {
+ expect(wrapper.vm.totalItems).toBe(100);
+ expect(wrapper.vm.page).toBe(2);
+ });
+
+ it('renders one page of issuables and pagination', () => {
+ expect(findIssuables().length).toBe(PAGE_SIZE);
+ expect(wrapper.find(GlPagination).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('with bulk editing enabled', () => {
+ beforeEach(() => {
+ issueablesEventBus.$on.mockReset();
+ issueablesEventBus.$emit.mockReset();
+
+ setupApiMock(() => [200, MOCK_ISSUES.slice(0)]);
+ factory({ canBulkEdit: true });
+
+ return waitForPromises();
+ });
+
+ it('is not enabled by default', () => {
+ expect(wrapper.vm.isBulkEditing).toBe(false);
+ });
+
+ it('does not select issues by default', () => {
+ expect(wrapper.vm.selection).toEqual({});
+ });
+
+  it('"Select All" checkbox toggles all visible issuables', () => {
+ wrapper.vm.onSelectAll();
+ expect(wrapper.vm.selection).toEqual(
+ wrapper.vm.issuables.reduce((acc, i) => ({ ...acc, [i.id]: true }), {}),
+ );
+
+ wrapper.vm.onSelectAll();
+ expect(wrapper.vm.selection).toEqual({});
+ });
+
+  it('"Select All checkbox" selects all issuables if only some are selected', () => {
+ wrapper.vm.selection = { [wrapper.vm.issuables[0].id]: true };
+ wrapper.vm.onSelectAll();
+ expect(wrapper.vm.selection).toEqual(
+ wrapper.vm.issuables.reduce((acc, i) => ({ ...acc, [i.id]: true }), {}),
+ );
+ });
+
+ it('selects and deselects issuables', () => {
+ const [i0, i1, i2] = wrapper.vm.issuables;
+
+ expect(wrapper.vm.selection).toEqual({});
+ wrapper.vm.onSelectIssuable({ issuable: i0, selected: false });
+ expect(wrapper.vm.selection).toEqual({});
+ wrapper.vm.onSelectIssuable({ issuable: i1, selected: true });
+ expect(wrapper.vm.selection).toEqual({ '1': true });
+ wrapper.vm.onSelectIssuable({ issuable: i0, selected: true });
+ expect(wrapper.vm.selection).toEqual({ '1': true, '0': true });
+ wrapper.vm.onSelectIssuable({ issuable: i2, selected: true });
+ expect(wrapper.vm.selection).toEqual({ '1': true, '0': true, '2': true });
+ wrapper.vm.onSelectIssuable({ issuable: i2, selected: true });
+ expect(wrapper.vm.selection).toEqual({ '1': true, '0': true, '2': true });
+ wrapper.vm.onSelectIssuable({ issuable: i0, selected: false });
+ expect(wrapper.vm.selection).toEqual({ '1': true, '2': true });
+ });
+
+ it('broadcasts a message to the bulk edit sidebar when a value is added to selection', () => {
+ issueablesEventBus.$emit.mockReset();
+ const i1 = wrapper.vm.issuables[1];
+
+ wrapper.vm.onSelectIssuable({ issuable: i1, selected: true });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(issueablesEventBus.$emit).toHaveBeenCalledTimes(1);
+ expect(issueablesEventBus.$emit).toHaveBeenCalledWith('issuables:updateBulkEdit');
+ });
+ });
+
+ it('does not broadcast a message to the bulk edit sidebar when a value is not added to selection', () => {
+ issueablesEventBus.$emit.mockReset();
+
+ return wrapper.vm
+ .$nextTick()
+ .then(waitForPromises)
+ .then(() => {
+ const i1 = wrapper.vm.issuables[1];
+
+ wrapper.vm.onSelectIssuable({ issuable: i1, selected: false });
+ })
+ .then(wrapper.vm.$nextTick)
+ .then(() => {
+ expect(issueablesEventBus.$emit).toHaveBeenCalledTimes(0);
+ });
+ });
+
+ it('listens to a message to toggle bulk editing', () => {
+ expect(wrapper.vm.isBulkEditing).toBe(false);
+ expect(issueablesEventBus.$on.mock.calls[0][0]).toBe('issuables:toggleBulkEdit');
+ issueablesEventBus.$on.mock.calls[0][1](true); // Call the message handler
+
+ return waitForPromises()
+ .then(() => {
+ expect(wrapper.vm.isBulkEditing).toBe(true);
+ issueablesEventBus.$on.mock.calls[0][1](false);
+ })
+ .then(() => {
+ expect(wrapper.vm.isBulkEditing).toBe(false);
+ });
+ });
+ });
+
+ describe('with query params in window.location', () => {
+ const query =
+ '?assignee_username=root&author_username=root&confidential=yes&label_name%5B%5D=Aquapod&label_name%5B%5D=Astro&milestone_title=v3.0&my_reaction_emoji=airplane&scope=all&sort=priority&state=opened&utf8=%E2%9C%93&weight=0';
+ const expectedFilters = {
+ assignee_username: 'root',
+ author_username: 'root',
+ confidential: 'yes',
+ my_reaction_emoji: 'airplane',
+ scope: 'all',
+ state: 'opened',
+ utf8: '✓',
+ weight: '0',
+ milestone: 'v3.0',
+ labels: 'Aquapod,Astro',
+ order_by: 'milestone_due',
+ sort: 'desc',
+ };
+
+ beforeEach(() => {
+ window.location.href = `${TEST_LOCATION}${query}`;
+ window.location.search = query;
+ setupApiMock(() => [200, MOCK_ISSUES.slice(0)]);
+ factory({ sortKey: 'milestone_due_desc' });
+ return waitForPromises();
+ });
+
+ it('applies filters and sorts', () => {
+ expect(wrapper.vm.hasFilters).toBe(true);
+ expect(wrapper.vm.filters).toEqual(expectedFilters);
+
+ expect(apiSpy).toHaveBeenCalledWith(
+ expect.objectContaining({
+ params: {
+ ...expectedFilters,
+ with_labels_details: true,
+ page: 1,
+ per_page: PAGE_SIZE,
+ },
+ }),
+ );
+ });
+
+ it('passes the base url to issuable', () => {
+ expect(findFirstIssuable().props('baseUrl')).toEqual(TEST_LOCATION);
+ });
+ });
+
+ describe('with hash in window.location', () => {
+ beforeEach(() => {
+ window.location.href = `${TEST_LOCATION}#stuff`;
+ setupApiMock(() => [200, MOCK_ISSUES.slice(0)]);
+ factory();
+ return waitForPromises();
+ });
+
+ it('passes the base url to issuable', () => {
+ expect(findFirstIssuable().props('baseUrl')).toEqual(TEST_LOCATION);
+ });
+ });
+
+ describe('with manual sort', () => {
+ beforeEach(() => {
+ setupApiMock(() => [200, MOCK_ISSUES.slice(0)]);
+ factory({ sortKey: RELATIVE_POSITION });
+ });
+
+ it('uses manual page size', () => {
+ expect(apiSpy).toHaveBeenCalledWith(
+ expect.objectContaining({
+ params: expect.objectContaining({
+ per_page: PAGE_SIZE_MANUAL,
+ }),
+ }),
+ );
+ });
+ });
+
+ describe('with empty issues response', () => {
+ beforeEach(() => {
+ setupApiMock(() => [200, []]);
+ });
+
+ describe('with query in window location', () => {
+ beforeEach(() => {
+ window.location.search = '?weight=Any';
+
+ factory();
+
+ return waitForPromises().then(() => wrapper.vm.$nextTick());
+ });
+
+ it('should display "Sorry, your filter produced no results" if filters are too specific', () => {
+ expect(findEmptyState().props('title')).toMatchSnapshot();
+ });
+ });
+
+ describe('with closed state', () => {
+ beforeEach(() => {
+ window.location.search = '?state=closed';
+
+ factory();
+
+ return waitForPromises().then(() => wrapper.vm.$nextTick());
+ });
+
+ it('should display a message "There are no closed issues" if there are no closed issues', () => {
+ expect(findEmptyState().props('title')).toMatchSnapshot();
+ });
+ });
+
+ describe('with all state', () => {
+ beforeEach(() => {
+ window.location.search = '?state=all';
+
+ factory();
+
+ return waitForPromises().then(() => wrapper.vm.$nextTick());
+ });
+
+ it('should display a catch-all if there are no issues to show', () => {
+ expect(findEmptyState().element).toMatchSnapshot();
+ });
+ });
+
+ describe('with empty query', () => {
+ beforeEach(() => {
+ factory();
+
+ return wrapper.vm.$nextTick().then(waitForPromises);
+ });
+
+ it('should display the message "There are no open issues"', () => {
+ expect(findEmptyState().props('title')).toMatchSnapshot();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issuables_list/issuable_list_test_data.js b/spec/frontend/issuables_list/issuable_list_test_data.js
new file mode 100644
index 00000000000..617780fd736
--- /dev/null
+++ b/spec/frontend/issuables_list/issuable_list_test_data.js
@@ -0,0 +1,72 @@
+export const simpleIssue = {
+ id: 442,
+ iid: 31,
+ title: 'Dismiss Cipher with no integrity',
+ state: 'opened',
+ created_at: '2019-08-26T19:06:32.667Z',
+ updated_at: '2019-08-28T19:53:58.314Z',
+ labels: [],
+ milestone: null,
+ assignees: [],
+ author: {
+ id: 3,
+ name: 'Elnora Bernhard',
+ username: 'treva.lesch',
+ state: 'active',
+ avatar_url: 'https://www.gravatar.com/avatar/a8c0d9c2882406cf2a9b71494625a796?s=80&d=identicon',
+ web_url: 'http://localhost:3001/treva.lesch',
+ },
+ assignee: null,
+ user_notes_count: 0,
+ merge_requests_count: 0,
+ upvotes: 0,
+ downvotes: 0,
+ due_date: null,
+ confidential: false,
+ web_url: 'http://localhost:3001/h5bp/html5-boilerplate/issues/31',
+ has_tasks: false,
+ weight: null,
+};
+
+export const testLabels = [
+ {
+ id: 1,
+ name: 'Tanuki',
+ description: 'A cute animal',
+ color: '#ff0000',
+ text_color: '#ffffff',
+ },
+ {
+ id: 2,
+ name: 'Octocat',
+ description: 'A grotesque mish-mash of whiskers and tentacles',
+ color: '#333333',
+ text_color: '#000000',
+ },
+ {
+ id: 3,
+ name: 'scoped::label',
+ description: 'A scoped label',
+ color: '#00ff00',
+ text_color: '#ffffff',
+ },
+];
+
+export const testAssignees = [
+ {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ web_url: 'http://localhost:3001/root',
+ },
+ {
+ id: 22,
+ name: 'User 0',
+ username: 'user0',
+ state: 'active',
+ avatar_url: 'https://www.gravatar.com/avatar/52e4ce24a915fb7e51e1ad3b57f4b00a?s=80&d=identicon',
+ web_url: 'http://localhost:3001/user0',
+ },
+];
diff --git a/spec/frontend/issue_show/helpers.js b/spec/frontend/issue_show/helpers.js
new file mode 100644
index 00000000000..5d2ced98ae4
--- /dev/null
+++ b/spec/frontend/issue_show/helpers.js
@@ -0,0 +1,10 @@
+// eslint-disable-next-line import/prefer-default-export
+export const keyboardDownEvent = (code, metaKey = false, ctrlKey = false) => {
+ const e = new CustomEvent('keydown');
+
+ e.keyCode = code;
+ e.metaKey = metaKey;
+ e.ctrlKey = ctrlKey;
+
+ return e;
+};
diff --git a/spec/frontend/jobs/components/log/log_spec.js b/spec/frontend/jobs/components/log/log_spec.js
index cc334009982..7c834542a9a 100644
--- a/spec/frontend/jobs/components/log/log_spec.js
+++ b/spec/frontend/jobs/components/log/log_spec.js
@@ -60,8 +60,8 @@ describe('Job Log', () => {
expect(wrapper.find('.collapsible-line').attributes('role')).toBe('button');
});
- it('renders an icon with the closed state', () => {
- expect(wrapper.find('.collapsible-line svg').classes()).toContain('ic-angle-right');
+ it('renders an icon with the open state', () => {
+ expect(wrapper.find('.collapsible-line svg').classes()).toContain('ic-angle-down');
});
describe('on click header section', () => {
diff --git a/spec/frontend/jobs/store/utils_spec.js b/spec/frontend/jobs/store/utils_spec.js
index 43dacfe622c..8819f39dee0 100644
--- a/spec/frontend/jobs/store/utils_spec.js
+++ b/spec/frontend/jobs/store/utils_spec.js
@@ -26,7 +26,7 @@ describe('Jobs Store Utils', () => {
const parsedHeaderLine = parseHeaderLine(headerLine, 2);
expect(parsedHeaderLine).toEqual({
- isClosed: true,
+ isClosed: false,
isHeader: true,
line: {
...headerLine,
@@ -57,7 +57,7 @@ describe('Jobs Store Utils', () => {
it('adds the section duration to the correct header', () => {
const parsed = [
{
- isClosed: true,
+ isClosed: false,
isHeader: true,
line: {
section: 'prepare-script',
@@ -66,7 +66,7 @@ describe('Jobs Store Utils', () => {
lines: [],
},
{
- isClosed: true,
+ isClosed: false,
isHeader: true,
line: {
section: 'foo-bar',
@@ -85,7 +85,7 @@ describe('Jobs Store Utils', () => {
it('does not add the section duration when the headers do not match', () => {
const parsed = [
{
- isClosed: true,
+ isClosed: false,
isHeader: true,
line: {
section: 'bar-foo',
@@ -94,7 +94,7 @@ describe('Jobs Store Utils', () => {
lines: [],
},
{
- isClosed: true,
+ isClosed: false,
isHeader: true,
line: {
section: 'foo-bar',
@@ -183,7 +183,7 @@ describe('Jobs Store Utils', () => {
describe('collpasible section', () => {
it('adds a `isClosed` property', () => {
- expect(result[1].isClosed).toEqual(true);
+ expect(result[1].isClosed).toEqual(false);
});
it('adds a `isHeader` property', () => {
@@ -213,7 +213,7 @@ describe('Jobs Store Utils', () => {
const existingLog = [
{
isHeader: true,
- isClosed: true,
+ isClosed: false,
line: { content: [{ text: 'bar' }], offset: 10, lineNumber: 1 },
},
];
@@ -263,7 +263,7 @@ describe('Jobs Store Utils', () => {
const existingLog = [
{
isHeader: true,
- isClosed: true,
+ isClosed: false,
lines: [{ offset: 101, content: [{ text: 'foobar' }], lineNumber: 2 }],
line: {
offset: 10,
@@ -435,7 +435,7 @@ describe('Jobs Store Utils', () => {
expect(result).toEqual([
{
- isClosed: true,
+ isClosed: false,
isHeader: true,
line: {
offset: 1,
@@ -461,7 +461,7 @@ describe('Jobs Store Utils', () => {
expect(result).toEqual([
{
- isClosed: true,
+ isClosed: false,
isHeader: true,
line: {
offset: 1,
diff --git a/spec/frontend/lib/utils/chart_utils_spec.js b/spec/frontend/lib/utils/chart_utils_spec.js
new file mode 100644
index 00000000000..e811b8405fb
--- /dev/null
+++ b/spec/frontend/lib/utils/chart_utils_spec.js
@@ -0,0 +1,11 @@
+import { firstAndLastY } from '~/lib/utils/chart_utils';
+
+describe('Chart utils', () => {
+ describe('firstAndLastY', () => {
+ it('returns the first and last y-values of a given data set as an array', () => {
+ const data = [['', 1], ['', 2], ['', 3]];
+
+ expect(firstAndLastY(data)).toEqual([1, 3]);
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index e2e71229320..ee27789b6b9 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -428,16 +428,57 @@ describe('newDate', () => {
});
describe('getDateInPast', () => {
- const date = new Date(1563235200000); // 2019-07-16T00:00:00.000Z;
+ const date = new Date('2019-07-16T00:00:00.000Z');
const daysInPast = 90;
it('returns the correct date in the past', () => {
const dateInPast = datetimeUtility.getDateInPast(date, daysInPast);
- expect(dateInPast).toBe('2019-04-17T00:00:00.000Z');
+ const expectedDateInPast = new Date('2019-04-17T00:00:00.000Z');
+
+ expect(dateInPast).toStrictEqual(expectedDateInPast);
});
it('does not modifiy the original date', () => {
datetimeUtility.getDateInPast(date, daysInPast);
- expect(date).toStrictEqual(new Date(1563235200000));
+ expect(date).toStrictEqual(new Date('2019-07-16T00:00:00.000Z'));
+ });
+});
+
+describe('getDatesInRange', () => {
+ it('returns an empty array if 1st or 2nd argument is not a Date object', () => {
+ const d1 = new Date('2019-01-01');
+ const d2 = 90;
+ const range = datetimeUtility.getDatesInRange(d1, d2);
+
+ expect(range).toEqual([]);
+ });
+
+ it('returns a range of dates between two given dates', () => {
+ const d1 = new Date('2019-01-01');
+ const d2 = new Date('2019-01-31');
+
+ const range = datetimeUtility.getDatesInRange(d1, d2);
+
+ expect(range.length).toEqual(31);
+ });
+
+ it('applies mapper function if provided fro each item in range', () => {
+ const d1 = new Date('2019-01-01');
+ const d2 = new Date('2019-01-31');
+ const formatter = date => date.getDate();
+
+ const range = datetimeUtility.getDatesInRange(d1, d2, formatter);
+
+ range.forEach((formattedItem, index) => {
+ expect(formattedItem).toEqual(index + 1);
+ });
+ });
+});
+
+describe('secondsToMilliseconds', () => {
+ it('converts seconds to milliseconds correctly', () => {
+ expect(datetimeUtility.secondsToMilliseconds(0)).toBe(0);
+ expect(datetimeUtility.secondsToMilliseconds(60)).toBe(60000);
+ expect(datetimeUtility.secondsToMilliseconds(123)).toBe(123000);
});
});
diff --git a/spec/frontend/lib/utils/number_utility_spec.js b/spec/frontend/lib/utils/number_utility_spec.js
index 381d7c6f8d9..2f8f1092612 100644
--- a/spec/frontend/lib/utils/number_utility_spec.js
+++ b/spec/frontend/lib/utils/number_utility_spec.js
@@ -7,6 +7,8 @@ import {
sum,
isOdd,
median,
+ changeInPercent,
+ formattedChangeInPercent,
} from '~/lib/utils/number_utils';
describe('Number Utils', () => {
@@ -122,4 +124,42 @@ describe('Number Utils', () => {
expect(median(items)).toBe(14.5);
});
});
+
+ describe('changeInPercent', () => {
+ it.each`
+ firstValue | secondValue | expectedOutput
+ ${99} | ${100} | ${1}
+ ${100} | ${99} | ${-1}
+ ${0} | ${99} | ${Infinity}
+ ${2} | ${2} | ${0}
+ ${-100} | ${-99} | ${1}
+ `(
+ 'computes the change between $firstValue and $secondValue in percent',
+ ({ firstValue, secondValue, expectedOutput }) => {
+ expect(changeInPercent(firstValue, secondValue)).toBe(expectedOutput);
+ },
+ );
+ });
+
+ describe('formattedChangeInPercent', () => {
+ it('prepends "%" to the output', () => {
+ expect(formattedChangeInPercent(1, 2)).toMatch(/%$/);
+ });
+
+ it('indicates if the change was a decrease', () => {
+ expect(formattedChangeInPercent(100, 99)).toContain('-1');
+ });
+
+ it('indicates if the change was an increase', () => {
+ expect(formattedChangeInPercent(99, 100)).toContain('+1');
+ });
+
+ it('shows "-" per default if the change can not be expressed in an integer', () => {
+ expect(formattedChangeInPercent(0, 1)).toBe('-');
+ });
+
+ it('shows the given fallback if the change can not be expressed in an integer', () => {
+ expect(formattedChangeInPercent(0, 1, { nonFiniteResult: '*' })).toBe('*');
+ });
+ });
});
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index b6f1aef9ce4..deb6dab772e 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -90,6 +90,19 @@ describe('text_utility', () => {
});
});
+ describe('convertToSnakeCase', () => {
+ it.each`
+ txt | result
+ ${'snakeCase'} | ${'snake_case'}
+ ${'snake Case'} | ${'snake_case'}
+ ${'snake case'} | ${'snake_case'}
+ ${'snake_case'} | ${'snake_case'}
+ ${'snakeCasesnake Case'} | ${'snake_casesnake_case'}
+ `('converts string $txt to $result string', ({ txt, result }) => {
+ expect(textUtils.convertToSnakeCase(txt)).toEqual(result);
+ });
+ });
+
describe('convertToSentenceCase', () => {
it('converts Sentence Case to Sentence case', () => {
expect(textUtils.convertToSentenceCase('Hello World')).toBe('Hello world');
diff --git a/spec/javascripts/monitoring/charts/time_series_spec.js b/spec/frontend/monitoring/charts/time_series_spec.js
index 5c718135b90..554535418fe 100644
--- a/spec/javascripts/monitoring/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/charts/time_series_spec.js
@@ -1,55 +1,77 @@
import { shallowMount } from '@vue/test-utils';
+import { setTestTimeout } from 'helpers/timeout';
import { createStore } from '~/monitoring/stores';
import { GlLink } from '@gitlab/ui';
import { GlAreaChart, GlLineChart, GlChartSeriesLabel } from '@gitlab/ui/dist/charts';
-import { shallowWrapperContainsSlotText } from 'spec/helpers/vue_test_utils_helper';
+import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
import TimeSeries from '~/monitoring/components/charts/time_series.vue';
import * as types from '~/monitoring/stores/mutation_types';
-import { TEST_HOST } from 'spec/test_constants';
-import MonitoringMock, { deploymentData, mockProjectPath } from '../mock_data';
+import {
+ deploymentData,
+ metricsGroupsAPIResponse,
+ mockedQueryResultPayload,
+ mockProjectDir,
+ mockHost,
+} from '../mock_data';
+
+import * as iconUtils from '~/lib/utils/icon_utils';
+
+const mockSvgPathContent = 'mockSvgPathContent';
+const mockWidgets = 'mockWidgets';
+
+jest.mock('~/lib/utils/icon_utils', () => ({
+ getSvgIconPathContent: jest.fn().mockImplementation(
+ () =>
+ new Promise(resolve => {
+ resolve(mockSvgPathContent);
+ }),
+ ),
+}));
describe('Time series component', () => {
- const mockSha = 'mockSha';
- const mockWidgets = 'mockWidgets';
- const mockSvgPathContent = 'mockSvgPathContent';
- const projectPath = `${TEST_HOST}${mockProjectPath}`;
- const commitUrl = `${projectPath}/commit/${mockSha}`;
let mockGraphData;
let makeTimeSeriesChart;
- let spriteSpy;
let store;
beforeEach(() => {
+ setTestTimeout(1000);
+
store = createStore();
- store.commit(`monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`, MonitoringMock.data);
+
+ store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
+ metricsGroupsAPIResponse,
+ );
+
store.commit(`monitoringDashboard/${types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS}`, deploymentData);
- [mockGraphData] = store.state.monitoringDashboard.groups[0].metrics;
+
+ // Mock data contains 2 panels, pick the first one
+ store.commit(`monitoringDashboard/${types.SET_QUERY_RESULT}`, mockedQueryResultPayload);
+
+ [mockGraphData] = store.state.monitoringDashboard.dashboard.panel_groups[0].metrics;
makeTimeSeriesChart = (graphData, type) =>
shallowMount(TimeSeries, {
propsData: {
graphData: { ...graphData, type },
- containerWidth: 0,
deploymentData: store.state.monitoringDashboard.deploymentData,
- projectPath,
+ projectPath: `${mockHost}${mockProjectDir}`,
},
slots: {
default: mockWidgets,
},
sync: false,
store,
+ attachToDocument: true,
});
-
- spriteSpy = spyOnDependency(TimeSeries, 'getSvgIconPathContent').and.callFake(
- () => new Promise(resolve => resolve(mockSvgPathContent)),
- );
});
describe('general functions', () => {
let timeSeriesChart;
- beforeEach(() => {
+ beforeEach(done => {
timeSeriesChart = makeTimeSeriesChart(mockGraphData, 'area-chart');
+ timeSeriesChart.vm.$nextTick(done);
});
it('renders chart title', () => {
@@ -74,18 +96,24 @@ describe('Time series component', () => {
describe('methods', () => {
describe('formatTooltipText', () => {
- const mockDate = deploymentData[0].created_at;
- const mockCommitUrl = deploymentData[0].commitUrl;
- const generateSeriesData = type => ({
- seriesData: [
- {
- seriesName: timeSeriesChart.vm.chartData[0].name,
- componentSubType: type,
- value: [mockDate, 5.55555],
- seriesIndex: 0,
- },
- ],
- value: mockDate,
+ let mockDate;
+ let mockCommitUrl;
+ let generateSeriesData;
+
+ beforeEach(() => {
+ mockDate = deploymentData[0].created_at;
+ mockCommitUrl = deploymentData[0].commitUrl;
+ generateSeriesData = type => ({
+ seriesData: [
+ {
+ seriesName: timeSeriesChart.vm.chartData[0].name,
+ componentSubType: type,
+ value: [mockDate, 5.55555],
+ dataIndex: 0,
+ },
+ ],
+ value: mockDate,
+ });
});
describe('when series is of line type', () => {
@@ -95,17 +123,21 @@ describe('Time series component', () => {
});
it('formats tooltip title', () => {
- expect(timeSeriesChart.vm.tooltip.title).toBe('31 May 2017, 9:23PM');
+ expect(timeSeriesChart.vm.tooltip.title).toBe('16 Jul 2019, 10:14AM');
});
it('formats tooltip content', () => {
- const name = 'Core Usage';
+ const name = 'Pod average';
const value = '5.556';
+ const dataIndex = 0;
const seriesLabel = timeSeriesChart.find(GlChartSeriesLabel);
expect(seriesLabel.vm.color).toBe('');
expect(shallowWrapperContainsSlotText(seriesLabel, 'default', name)).toBe(true);
- expect(timeSeriesChart.vm.tooltip.content).toEqual([{ name, value, color: undefined }]);
+ expect(timeSeriesChart.vm.tooltip.content).toEqual([
+ { name, value, dataIndex, color: undefined },
+ ]);
+
expect(
shallowWrapperContainsSlotText(
timeSeriesChart.find(GlAreaChart),
@@ -116,13 +148,13 @@ describe('Time series component', () => {
});
});
- describe('when series is of scatter type', () => {
+ describe('when series is of scatter type, for deployments', () => {
beforeEach(() => {
timeSeriesChart.vm.formatTooltipText(generateSeriesData('scatter'));
});
it('formats tooltip title', () => {
- expect(timeSeriesChart.vm.tooltip.title).toBe('31 May 2017, 9:23PM');
+ expect(timeSeriesChart.vm.tooltip.title).toBe('16 Jul 2019, 10:14AM');
});
it('formats tooltip sha', () => {
@@ -144,7 +176,7 @@ describe('Time series component', () => {
});
it('gets svg path content', () => {
- expect(spriteSpy).toHaveBeenCalledWith(mockSvgName);
+ expect(iconUtils.getSvgIconPathContent).toHaveBeenCalledWith(mockSvgName);
});
it('sets svg path content', () => {
@@ -168,7 +200,7 @@ describe('Time series component', () => {
const mockWidth = 233;
beforeEach(() => {
- spyOn(Element.prototype, 'getBoundingClientRect').and.callFake(() => ({
+ jest.spyOn(Element.prototype, 'getBoundingClientRect').mockImplementation(() => ({
width: mockWidth,
}));
timeSeriesChart.vm.onResize();
@@ -212,6 +244,39 @@ describe('Time series component', () => {
});
describe('chartOptions', () => {
+ describe('are extended by `option`', () => {
+ const mockSeriesName = 'Extra series 1';
+ const mockOption = {
+ option1: 'option1',
+ option2: 'option2',
+ };
+
+ it('arbitrary options', () => {
+ timeSeriesChart.setProps({
+ option: mockOption,
+ });
+
+ expect(timeSeriesChart.vm.chartOptions).toEqual(expect.objectContaining(mockOption));
+ });
+
+ it('additional series', () => {
+ timeSeriesChart.setProps({
+ option: {
+ series: [
+ {
+ name: mockSeriesName,
+ },
+ ],
+ },
+ });
+
+ const optionSeries = timeSeriesChart.vm.chartOptions.series;
+
+ expect(optionSeries.length).toEqual(2);
+ expect(optionSeries[0].name).toEqual(mockSeriesName);
+ });
+ });
+
describe('yAxis formatter', () => {
let format;
@@ -228,9 +293,9 @@ describe('Time series component', () => {
describe('scatterSeries', () => {
it('utilizes deployment data', () => {
expect(timeSeriesChart.vm.scatterSeries.data).toEqual([
- ['2017-05-31T21:23:37.881Z', 0],
- ['2017-05-30T20:08:04.629Z', 0],
- ['2017-05-30T17:42:38.409Z', 0],
+ ['2019-07-16T10:14:25.589Z', 0],
+ ['2019-07-16T11:14:25.589Z', 0],
+ ['2019-07-16T12:14:25.589Z', 0],
]);
expect(timeSeriesChart.vm.scatterSeries.symbolSize).toBe(14);
@@ -239,7 +304,7 @@ describe('Time series component', () => {
describe('yAxisLabel', () => {
it('constructs a label for the chart y-axis', () => {
- expect(timeSeriesChart.vm.yAxisLabel).toBe('CPU');
+ expect(timeSeriesChart.vm.yAxisLabel).toBe('Memory Used per Pod');
});
});
});
@@ -272,6 +337,10 @@ describe('Time series component', () => {
timeSeriesAreaChart.vm.$nextTick(done);
});
+ afterEach(() => {
+ timeSeriesAreaChart.destroy();
+ });
+
it('is a Vue instance', () => {
expect(glChart.exists()).toBe(true);
expect(glChart.isVueInstance()).toBe(true);
@@ -297,6 +366,9 @@ describe('Time series component', () => {
});
describe('when tooltip is showing deployment data', () => {
+ const mockSha = 'mockSha';
+ const commitUrl = `${mockProjectDir}/commit/${mockSha}`;
+
beforeEach(done => {
timeSeriesAreaChart.vm.tooltip.isDeployment = true;
timeSeriesAreaChart.vm.$nextTick(done);
diff --git a/spec/frontend/monitoring/components/charts/anomaly_spec.js b/spec/frontend/monitoring/components/charts/anomaly_spec.js
new file mode 100644
index 00000000000..6707d0b1fe8
--- /dev/null
+++ b/spec/frontend/monitoring/components/charts/anomaly_spec.js
@@ -0,0 +1,303 @@
+import Anomaly from '~/monitoring/components/charts/anomaly.vue';
+
+import { shallowMount } from '@vue/test-utils';
+import { colorValues } from '~/monitoring/constants';
+import {
+ anomalyDeploymentData,
+ mockProjectDir,
+ anomalyMockGraphData,
+ anomalyMockResultValues,
+} from '../../mock_data';
+import { TEST_HOST } from 'helpers/test_constants';
+import MonitorTimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
+
+const mockWidgets = 'mockWidgets';
+const mockProjectPath = `${TEST_HOST}${mockProjectDir}`;
+
+jest.mock('~/lib/utils/icon_utils'); // mock getSvgIconPathContent
+
+const makeAnomalyGraphData = (datasetName, template = anomalyMockGraphData) => {
+ const queries = anomalyMockResultValues[datasetName].map((values, index) => ({
+ ...template.queries[index],
+ result: [
+ {
+ metrics: {},
+ values,
+ },
+ ],
+ }));
+ return { ...template, queries };
+};
+
+describe('Anomaly chart component', () => {
+ let wrapper;
+
+ const setupAnomalyChart = props => {
+ wrapper = shallowMount(Anomaly, {
+ propsData: { ...props },
+ slots: {
+ default: mockWidgets,
+ },
+ sync: false,
+ });
+ };
+ const findTimeSeries = () => wrapper.find(MonitorTimeSeriesChart);
+ const getTimeSeriesProps = () => findTimeSeries().props();
+
+ describe('wrapped monitor-time-series-chart component', () => {
+ const dataSetName = 'noAnomaly';
+ const dataSet = anomalyMockResultValues[dataSetName];
+ const inputThresholds = ['some threshold'];
+
+ beforeEach(() => {
+ setupAnomalyChart({
+ graphData: makeAnomalyGraphData(dataSetName),
+ deploymentData: anomalyDeploymentData,
+ thresholds: inputThresholds,
+ projectPath: mockProjectPath,
+ });
+ });
+
+ it('is a Vue instance', () => {
+ expect(findTimeSeries().exists()).toBe(true);
+ expect(findTimeSeries().isVueInstance()).toBe(true);
+ });
+
+ describe('receives props correctly', () => {
+ describe('graph-data', () => {
+ it('receives a single "metric" series', () => {
+ const { graphData } = getTimeSeriesProps();
+ expect(graphData.queries.length).toBe(1);
+ });
+
+ it('receives "metric" with all data', () => {
+ const { graphData } = getTimeSeriesProps();
+ const query = graphData.queries[0];
+ const expectedQuery = makeAnomalyGraphData(dataSetName).queries[0];
+ expect(query).toEqual(expectedQuery);
+ });
+
+ it('receives the "metric" results', () => {
+ const { graphData } = getTimeSeriesProps();
+ const { result } = graphData.queries[0];
+ const { values } = result[0];
+ const [metricDataset] = dataSet;
+ expect(values).toEqual(expect.any(Array));
+
+ values.forEach(([, y], index) => {
+ expect(y).toBeCloseTo(metricDataset[index][1]);
+ });
+ });
+ });
+
+ describe('option', () => {
+ let option;
+ let series;
+
+ beforeEach(() => {
+ ({ option } = getTimeSeriesProps());
+ ({ series } = option);
+ });
+
+ it('contains a boundary band', () => {
+ expect(series).toEqual(expect.any(Array));
+ expect(series.length).toEqual(2); // 1 upper + 1 lower boundaries
+ expect(series[0].stack).toEqual(series[1].stack);
+
+ series.forEach(s => {
+ expect(s.type).toBe('line');
+ expect(s.lineStyle.width).toBe(0);
+ expect(s.lineStyle.color).toMatch(/rgba\(.+\)/);
+ expect(s.lineStyle.color).toMatch(s.color);
+ expect(s.symbol).toEqual('none');
+ });
+ });
+
+ it('upper boundary values are stacked on top of lower boundary', () => {
+ const [lowerSeries, upperSeries] = series;
+ const [, upperDataset, lowerDataset] = dataSet;
+
+ lowerSeries.data.forEach(([, y], i) => {
+ expect(y).toBeCloseTo(lowerDataset[i][1]);
+ });
+
+ upperSeries.data.forEach(([, y], i) => {
+ expect(y).toBeCloseTo(upperDataset[i][1] - lowerDataset[i][1]);
+ });
+ });
+ });
+
+ describe('series-config', () => {
+ let seriesConfig;
+
+ beforeEach(() => {
+ ({ seriesConfig } = getTimeSeriesProps());
+ });
+
+ it('display symbols is enabled', () => {
+ expect(seriesConfig).toEqual(
+ expect.objectContaining({
+ type: 'line',
+ symbol: 'circle',
+ showSymbol: true,
+ symbolSize: expect.any(Function),
+ itemStyle: {
+ color: expect.any(Function),
+ },
+ }),
+ );
+ });
+ it('does not display anomalies', () => {
+ const { symbolSize, itemStyle } = seriesConfig;
+ const [metricDataset] = dataSet;
+
+ metricDataset.forEach((v, dataIndex) => {
+ const size = symbolSize(null, { dataIndex });
+ const color = itemStyle.color({ dataIndex });
+
+ // normal color and small size
+ expect(size).toBeCloseTo(0);
+ expect(color).toBe(colorValues.primaryColor);
+ });
+ });
+
+ it('can format y values (to use in tooltips)', () => {
+ expect(parseFloat(wrapper.vm.yValueFormatted(0, 0))).toEqual(dataSet[0][0][1]);
+ expect(parseFloat(wrapper.vm.yValueFormatted(1, 0))).toEqual(dataSet[1][0][1]);
+ expect(parseFloat(wrapper.vm.yValueFormatted(2, 0))).toEqual(dataSet[2][0][1]);
+ });
+ });
+
+ describe('inherited properties', () => {
+ it('"deployment-data" keeps the same value', () => {
+ const { deploymentData } = getTimeSeriesProps();
+ expect(deploymentData).toEqual(anomalyDeploymentData);
+ });
+ it('"thresholds" keeps the same value', () => {
+ const { thresholds } = getTimeSeriesProps();
+ expect(thresholds).toEqual(inputThresholds);
+ });
+ it('"projectPath" keeps the same value', () => {
+ const { projectPath } = getTimeSeriesProps();
+ expect(projectPath).toEqual(mockProjectPath);
+ });
+ });
+ });
+ });
+
+ describe('with no boundary data', () => {
+ const dataSetName = 'noBoundary';
+ const dataSet = anomalyMockResultValues[dataSetName];
+
+ beforeEach(() => {
+ setupAnomalyChart({
+ graphData: makeAnomalyGraphData(dataSetName),
+ deploymentData: anomalyDeploymentData,
+ });
+ });
+
+ describe('option', () => {
+ let option;
+ let series;
+
+ beforeEach(() => {
+ ({ option } = getTimeSeriesProps());
+ ({ series } = option);
+ });
+
+ it('does not display a boundary band', () => {
+ expect(series).toEqual(expect.any(Array));
+ expect(series.length).toEqual(0); // no boundaries
+ });
+
+ it('can format y values (to use in tooltips)', () => {
+ expect(parseFloat(wrapper.vm.yValueFormatted(0, 0))).toEqual(dataSet[0][0][1]);
+ expect(wrapper.vm.yValueFormatted(1, 0)).toBe(''); // missing boundary
+ expect(wrapper.vm.yValueFormatted(2, 0)).toBe(''); // missing boundary
+ });
+ });
+ });
+
+ describe('with one anomaly', () => {
+ const dataSetName = 'oneAnomaly';
+ const dataSet = anomalyMockResultValues[dataSetName];
+
+ beforeEach(() => {
+ setupAnomalyChart({
+ graphData: makeAnomalyGraphData(dataSetName),
+ deploymentData: anomalyDeploymentData,
+ });
+ });
+
+ describe('series-config', () => {
+ it('displays one anomaly', () => {
+ const { seriesConfig } = getTimeSeriesProps();
+ const { symbolSize, itemStyle } = seriesConfig;
+ const [metricDataset] = dataSet;
+
+ const bigDots = metricDataset.filter((v, dataIndex) => {
+ const size = symbolSize(null, { dataIndex });
+ return size > 0.1;
+ });
+ const redDots = metricDataset.filter((v, dataIndex) => {
+ const color = itemStyle.color({ dataIndex });
+ return color === colorValues.anomalySymbol;
+ });
+
+ expect(bigDots.length).toBe(1);
+ expect(redDots.length).toBe(1);
+ });
+ });
+ });
+
+ describe('with offset', () => {
+ const dataSetName = 'negativeBoundary';
+ const dataSet = anomalyMockResultValues[dataSetName];
+ const expectedOffset = 4; // Lowst point in mock data is -3.70, it gets rounded
+
+ beforeEach(() => {
+ setupAnomalyChart({
+ graphData: makeAnomalyGraphData(dataSetName),
+ deploymentData: anomalyDeploymentData,
+ });
+ });
+
+ describe('receives props correctly', () => {
+ describe('graph-data', () => {
+ it('receives a single "metric" series', () => {
+ const { graphData } = getTimeSeriesProps();
+ expect(graphData.queries.length).toBe(1);
+ });
+
+ it('receives "metric" results and applies the offset to them', () => {
+ const { graphData } = getTimeSeriesProps();
+ const { result } = graphData.queries[0];
+ const { values } = result[0];
+ const [metricDataset] = dataSet;
+ expect(values).toEqual(expect.any(Array));
+
+ values.forEach(([, y], index) => {
+ expect(y).toBeCloseTo(metricDataset[index][1] + expectedOffset);
+ });
+ });
+ });
+ });
+
+ describe('option', () => {
+ it('upper boundary values are stacked on top of lower boundary, plus the offset', () => {
+ const { option } = getTimeSeriesProps();
+ const { series } = option;
+ const [lowerSeries, upperSeries] = series;
+ const [, upperDataset, lowerDataset] = dataSet;
+
+ lowerSeries.data.forEach(([, y], i) => {
+ expect(y).toBeCloseTo(lowerDataset[i][1] + expectedOffset);
+ });
+
+ upperSeries.data.forEach(([, y], i) => {
+ expect(y).toBeCloseTo(upperDataset[i][1] - lowerDataset[i][1]);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js b/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js
index be544435671..ca05461c8cf 100644
--- a/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js
+++ b/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js
@@ -51,6 +51,16 @@ describe('DateTimePicker', () => {
});
});
+ it('renders dropdown without a selectedTimeWindow set', done => {
+ createComponent({
+ selectedTimeWindow: {},
+ });
+ dateTimePicker.vm.$nextTick(() => {
+ expect(dateTimePicker.findAll('input').length).toBe(2);
+ done();
+ });
+ });
+
it('renders inputs with h/m/s truncated if its all 0s', done => {
createComponent({
selectedTimeWindow: {
diff --git a/spec/frontend/monitoring/embed/embed_spec.js b/spec/frontend/monitoring/embed/embed_spec.js
index 5de1a7c4c3b..3e22b0858e6 100644
--- a/spec/frontend/monitoring/embed/embed_spec.js
+++ b/spec/frontend/monitoring/embed/embed_spec.js
@@ -61,8 +61,8 @@ describe('Embed', () => {
describe('metrics are available', () => {
beforeEach(() => {
- store.state.monitoringDashboard.groups = groups;
- store.state.monitoringDashboard.groups[0].metrics = metricsData;
+ store.state.monitoringDashboard.dashboard.panel_groups = groups;
+ store.state.monitoringDashboard.dashboard.panel_groups[0].metrics = metricsData;
store.state.monitoringDashboard.metricsWithData = metricsWithData;
mountComponent();
diff --git a/spec/frontend/monitoring/embed/mock_data.js b/spec/frontend/monitoring/embed/mock_data.js
index df4acb82e95..1685021fd4b 100644
--- a/spec/frontend/monitoring/embed/mock_data.js
+++ b/spec/frontend/monitoring/embed/mock_data.js
@@ -81,7 +81,9 @@ export const metricsData = [
export const initialState = {
monitoringDashboard: {},
- groups: [],
+ dashboard: {
+ panel_groups: [],
+ },
metricsWithData: [],
useDashboardEndpoint: true,
};
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
new file mode 100644
index 00000000000..c42366ab484
--- /dev/null
+++ b/spec/frontend/monitoring/mock_data.js
@@ -0,0 +1,465 @@
+export const mockHost = 'http://test.host';
+export const mockProjectDir = '/frontend-fixtures/environments-project';
+
+export const anomalyDeploymentData = [
+ {
+ id: 111,
+ iid: 3,
+ sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
+ ref: {
+ name: 'master',
+ },
+ created_at: '2019-08-19T22:00:00.000Z',
+ deployed_at: '2019-08-19T22:01:00.000Z',
+ tag: false,
+ 'last?': true,
+ },
+ {
+ id: 110,
+ iid: 2,
+ sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
+ ref: {
+ name: 'master',
+ },
+ created_at: '2019-08-19T23:00:00.000Z',
+ deployed_at: '2019-08-19T23:00:00.000Z',
+ tag: false,
+ 'last?': false,
+ },
+];
+
+export const anomalyMockResultValues = {
+ noAnomaly: [
+ [
+ ['2019-08-19T19:00:00.000Z', 1.25],
+ ['2019-08-19T20:00:00.000Z', 1.45],
+ ['2019-08-19T21:00:00.000Z', 1.55],
+ ['2019-08-19T22:00:00.000Z', 1.48],
+ ],
+ [
+ // upper boundary
+ ['2019-08-19T19:00:00.000Z', 2],
+ ['2019-08-19T20:00:00.000Z', 2.55],
+ ['2019-08-19T21:00:00.000Z', 2.65],
+ ['2019-08-19T22:00:00.000Z', 3.0],
+ ],
+ [
+ // lower boundary
+ ['2019-08-19T19:00:00.000Z', 0.45],
+ ['2019-08-19T20:00:00.000Z', 0.65],
+ ['2019-08-19T21:00:00.000Z', 0.7],
+ ['2019-08-19T22:00:00.000Z', 0.8],
+ ],
+ ],
+ noBoundary: [
+ [
+ ['2019-08-19T19:00:00.000Z', 1.25],
+ ['2019-08-19T20:00:00.000Z', 1.45],
+ ['2019-08-19T21:00:00.000Z', 1.55],
+ ['2019-08-19T22:00:00.000Z', 1.48],
+ ],
+ [
+ // empty upper boundary
+ ],
+ [
+ // empty lower boundary
+ ],
+ ],
+ oneAnomaly: [
+ [
+ ['2019-08-19T19:00:00.000Z', 1.25],
+ ['2019-08-19T20:00:00.000Z', 3.45], // anomaly
+ ['2019-08-19T21:00:00.000Z', 1.55],
+ ],
+ [
+ // upper boundary
+ ['2019-08-19T19:00:00.000Z', 2],
+ ['2019-08-19T20:00:00.000Z', 2.55],
+ ['2019-08-19T21:00:00.000Z', 2.65],
+ ],
+ [
+ // lower boundary
+ ['2019-08-19T19:00:00.000Z', 0.45],
+ ['2019-08-19T20:00:00.000Z', 0.65],
+ ['2019-08-19T21:00:00.000Z', 0.7],
+ ],
+ ],
+ negativeBoundary: [
+ [
+ ['2019-08-19T19:00:00.000Z', 1.25],
+ ['2019-08-19T20:00:00.000Z', 3.45], // anomaly
+ ['2019-08-19T21:00:00.000Z', 1.55],
+ ],
+ [
+ // upper boundary
+ ['2019-08-19T19:00:00.000Z', 2],
+ ['2019-08-19T20:00:00.000Z', 2.55],
+ ['2019-08-19T21:00:00.000Z', 2.65],
+ ],
+ [
+ // lower boundary
+ ['2019-08-19T19:00:00.000Z', -1.25],
+ ['2019-08-19T20:00:00.000Z', -2.65],
+ ['2019-08-19T21:00:00.000Z', -3.7], // lowest point
+ ],
+ ],
+};
+
+export const anomalyMockGraphData = {
+ title: 'Requests Per Second Mock Data',
+ type: 'anomaly-chart',
+ weight: 3,
+ metrics: [
+ // Not used
+ ],
+ queries: [
+ {
+ metricId: '90',
+ id: 'metric',
+ query_range: 'MOCK_PROMETHEUS_METRIC_QUERY_RANGE',
+ unit: 'RPS',
+ label: 'Metrics RPS',
+ metric_id: 90,
+ prometheus_endpoint_path: 'MOCK_METRIC_PEP',
+ result: [
+ {
+ metric: {},
+ values: [['2019-08-19T19:00:00.000Z', 0]],
+ },
+ ],
+ },
+ {
+ metricId: '91',
+ id: 'upper',
+ query_range: '...',
+ unit: 'RPS',
+ label: 'Upper Limit Metrics RPS',
+ metric_id: 91,
+ prometheus_endpoint_path: 'MOCK_UPPER_PEP',
+ result: [
+ {
+ metric: {},
+ values: [['2019-08-19T19:00:00.000Z', 0]],
+ },
+ ],
+ },
+ {
+ metricId: '92',
+ id: 'lower',
+ query_range: '...',
+ unit: 'RPS',
+ label: 'Lower Limit Metrics RPS',
+ metric_id: 92,
+ prometheus_endpoint_path: 'MOCK_LOWER_PEP',
+ result: [
+ {
+ metric: {},
+ values: [['2019-08-19T19:00:00.000Z', 0]],
+ },
+ ],
+ },
+ ],
+};
+
+export const deploymentData = [
+ {
+ id: 111,
+ iid: 3,
+ sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
+ commitUrl:
+ 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
+ ref: {
+ name: 'master',
+ },
+ created_at: '2019-07-16T10:14:25.589Z',
+ tag: false,
+ tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false',
+ 'last?': true,
+ },
+ {
+ id: 110,
+ iid: 2,
+ sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
+ commitUrl:
+ 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
+ ref: {
+ name: 'master',
+ },
+ created_at: '2019-07-16T11:14:25.589Z',
+ tag: false,
+ tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false',
+ 'last?': false,
+ },
+ {
+ id: 109,
+ iid: 1,
+ sha: '6511e58faafaa7ad2228990ec57f19d66f7db7c2',
+ commitUrl:
+ 'http://test.host/frontend-fixtures/environments-project/commit/6511e58faafaa7ad2228990ec57f19d66f7db7c2',
+ ref: {
+ name: 'update2-readme',
+ },
+ created_at: '2019-07-16T12:14:25.589Z',
+ tag: false,
+ tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false',
+ 'last?': false,
+ },
+];
+
+export const metricsNewGroupsAPIResponse = [
+ {
+ group: 'System metrics (Kubernetes)',
+ priority: 5,
+ panels: [
+ {
+ title: 'Memory Usage (Pod average)',
+ type: 'area-chart',
+ y_label: 'Memory Used per Pod',
+ weight: 2,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_memory_average',
+ query_range:
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
+ label: 'Pod average',
+ unit: 'MB',
+ metric_id: 17,
+ prometheus_endpoint_path:
+ '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
+ appearance: {
+ line: {
+ width: 2,
+ },
+ },
+ },
+ ],
+ },
+ ],
+ },
+];
+
+export const mockedQueryResultPayload = {
+ metricId: '17_system_metrics_kubernetes_container_memory_average',
+ result: [
+ {
+ metric: {},
+ values: [
+ [1563272065.589, '10.396484375'],
+ [1563272125.589, '10.333984375'],
+ [1563272185.589, '10.333984375'],
+ [1563272245.589, '10.333984375'],
+ [1563272305.589, '10.333984375'],
+ [1563272365.589, '10.333984375'],
+ [1563272425.589, '10.38671875'],
+ [1563272485.589, '10.333984375'],
+ [1563272545.589, '10.333984375'],
+ [1563272605.589, '10.333984375'],
+ [1563272665.589, '10.333984375'],
+ [1563272725.589, '10.333984375'],
+ [1563272785.589, '10.396484375'],
+ [1563272845.589, '10.333984375'],
+ [1563272905.589, '10.333984375'],
+ [1563272965.589, '10.3984375'],
+ [1563273025.589, '10.337890625'],
+ [1563273085.589, '10.34765625'],
+ [1563273145.589, '10.337890625'],
+ [1563273205.589, '10.337890625'],
+ [1563273265.589, '10.337890625'],
+ [1563273325.589, '10.337890625'],
+ [1563273385.589, '10.337890625'],
+ [1563273445.589, '10.337890625'],
+ [1563273505.589, '10.337890625'],
+ [1563273565.589, '10.337890625'],
+ [1563273625.589, '10.337890625'],
+ [1563273685.589, '10.337890625'],
+ [1563273745.589, '10.337890625'],
+ [1563273805.589, '10.337890625'],
+ [1563273865.589, '10.390625'],
+ [1563273925.589, '10.390625'],
+ ],
+ },
+ ],
+};
+
+export const metricsGroupsAPIResponse = [
+ {
+ group: 'System metrics (Kubernetes)',
+ priority: 5,
+ panels: [
+ {
+ title: 'Memory Usage (Pod average)',
+ type: 'area-chart',
+ y_label: 'Memory Used per Pod',
+ weight: 2,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_memory_average',
+ query_range:
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
+ label: 'Pod average',
+ unit: 'MB',
+ metric_id: 17,
+ prometheus_endpoint_path:
+ '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
+ appearance: {
+ line: {
+ width: 2,
+ },
+ },
+ },
+ ],
+ },
+ {
+ title: 'Core Usage (Total)',
+ type: 'area-chart',
+ y_label: 'Total Cores',
+ weight: 3,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_cores_total',
+ query_range:
+ 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
+ label: 'Total',
+ unit: 'cores',
+ metric_id: 13,
+ },
+ ],
+ },
+ ],
+ },
+];
+
+export const environmentData = [
+ {
+ id: 34,
+ name: 'production',
+ state: 'available',
+ external_url: 'http://root-autodevops-deploy.my-fake-domain.com',
+ environment_type: null,
+ stop_action: false,
+ metrics_path: '/root/hello-prometheus/environments/34/metrics',
+ environment_path: '/root/hello-prometheus/environments/34',
+ stop_path: '/root/hello-prometheus/environments/34/stop',
+ terminal_path: '/root/hello-prometheus/environments/34/terminal',
+ folder_path: '/root/hello-prometheus/environments/folders/production',
+ created_at: '2018-06-29T16:53:38.301Z',
+ updated_at: '2018-06-29T16:57:09.825Z',
+ last_deployment: {
+ id: 127,
+ },
+ },
+ {
+ id: 35,
+ name: 'review/noop-branch',
+ state: 'available',
+ external_url: 'http://root-autodevops-deploy-review-noop-branc-die93w.my-fake-domain.com',
+ environment_type: 'review',
+ stop_action: true,
+ metrics_path: '/root/hello-prometheus/environments/35/metrics',
+ environment_path: '/root/hello-prometheus/environments/35',
+ stop_path: '/root/hello-prometheus/environments/35/stop',
+ terminal_path: '/root/hello-prometheus/environments/35/terminal',
+ folder_path: '/root/hello-prometheus/environments/folders/review',
+ created_at: '2018-07-03T18:39:41.702Z',
+ updated_at: '2018-07-03T18:44:54.010Z',
+ last_deployment: {
+ id: 128,
+ },
+ },
+ {
+ id: 36,
+ name: 'no-deployment/noop-branch',
+ state: 'available',
+ created_at: '2018-07-04T18:39:41.702Z',
+ updated_at: '2018-07-04T18:44:54.010Z',
+ },
+];
+
+export const metricsDashboardResponse = {
+ dashboard: {
+ dashboard: 'Environment metrics',
+ priority: 1,
+ panel_groups: [
+ {
+ group: 'System metrics (Kubernetes)',
+ priority: 5,
+ panels: [
+ {
+ title: 'Memory Usage (Total)',
+ type: 'area-chart',
+ y_label: 'Total Memory Used',
+ weight: 4,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_memory_total',
+ query_range:
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
+ label: 'Total',
+ unit: 'GB',
+ metric_id: 12,
+ prometheus_endpoint_path: 'http://test',
+ },
+ ],
+ },
+ {
+ title: 'Core Usage (Total)',
+ type: 'area-chart',
+ y_label: 'Total Cores',
+ weight: 3,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_cores_total',
+ query_range:
+ 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
+ label: 'Total',
+ unit: 'cores',
+ metric_id: 13,
+ },
+ ],
+ },
+ {
+ title: 'Memory Usage (Pod average)',
+ type: 'line-chart',
+ y_label: 'Memory Used per Pod',
+ weight: 2,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_memory_average',
+ query_range:
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
+ label: 'Pod average',
+ unit: 'MB',
+ metric_id: 14,
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ },
+ status: 'success',
+};
+
+export const dashboardGitResponse = [
+ {
+ default: true,
+ display_name: 'Default',
+ can_edit: false,
+ project_blob_path: null,
+ path: 'config/prometheus/common_metrics.yml',
+ },
+ {
+ default: false,
+ display_name: 'Custom Dashboard 1',
+ can_edit: true,
+ project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_1.yml`,
+ path: '.gitlab/dashboards/dashboard_1.yml',
+ },
+ {
+ default: false,
+ display_name: 'Custom Dashboard 2',
+ can_edit: true,
+ project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_2.yml`,
+ path: '.gitlab/dashboards/dashboard_2.yml',
+ },
+];
diff --git a/spec/frontend/monitoring/panel_type_spec.js b/spec/frontend/monitoring/panel_type_spec.js
new file mode 100644
index 00000000000..54a63e7f61f
--- /dev/null
+++ b/spec/frontend/monitoring/panel_type_spec.js
@@ -0,0 +1,166 @@
+import { shallowMount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import { setTestTimeout } from 'helpers/timeout';
+import axios from '~/lib/utils/axios_utils';
+import PanelType from '~/monitoring/components/panel_type.vue';
+import EmptyChart from '~/monitoring/components/charts/empty_chart.vue';
+import TimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
+import AnomalyChart from '~/monitoring/components/charts/anomaly.vue';
+import { graphDataPrometheusQueryRange } from '../../javascripts/monitoring/mock_data';
+import { anomalyMockGraphData } from '../../frontend/monitoring/mock_data';
+import { createStore } from '~/monitoring/stores';
+
+global.IS_EE = true;
+global.URL.createObjectURL = jest.fn();
+
+describe('Panel Type component', () => {
+ let axiosMock;
+ let store;
+ let panelType;
+ const dashboardWidth = 100;
+ const exampleText = 'example_text';
+
+ beforeEach(() => {
+ setTestTimeout(1000);
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ axiosMock.reset();
+ });
+
+ describe('When no graphData is available', () => {
+ let glEmptyChart;
+ // Deep clone object before modifying
+ const graphDataNoResult = JSON.parse(JSON.stringify(graphDataPrometheusQueryRange));
+ graphDataNoResult.queries[0].result = [];
+
+ beforeEach(() => {
+ panelType = shallowMount(PanelType, {
+ propsData: {
+ clipboardText: 'dashboard_link',
+ dashboardWidth,
+ graphData: graphDataNoResult,
+ },
+ sync: false,
+ attachToDocument: true,
+ });
+ });
+
+ afterEach(() => {
+ panelType.destroy();
+ });
+
+ describe('Empty Chart component', () => {
+ beforeEach(() => {
+ glEmptyChart = panelType.find(EmptyChart);
+ });
+
+ it('is a Vue instance', () => {
+ expect(glEmptyChart.isVueInstance()).toBe(true);
+ });
+
+ it('it receives a graph title', () => {
+ const props = glEmptyChart.props();
+
+ expect(props.graphTitle).toBe(panelType.vm.graphData.title);
+ });
+ });
+ });
+
+ describe('when Graph data is available', () => {
+ const propsData = {
+ clipboardText: exampleText,
+ dashboardWidth,
+ graphData: graphDataPrometheusQueryRange,
+ };
+
+ beforeEach(done => {
+ store = createStore();
+ panelType = shallowMount(PanelType, {
+ propsData,
+ store,
+ sync: false,
+ attachToDocument: true,
+ });
+ panelType.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ panelType.destroy();
+ });
+
+ describe('Time Series Chart panel type', () => {
+ it('is rendered', () => {
+ expect(panelType.find(TimeSeriesChart).isVueInstance()).toBe(true);
+ expect(panelType.find(TimeSeriesChart).exists()).toBe(true);
+ });
+
+ it('sets clipboard text on the dropdown', () => {
+ const link = () => panelType.find('.js-chart-link');
+ const clipboardText = () => link().element.dataset.clipboardText;
+
+ expect(clipboardText()).toBe(exampleText);
+ });
+ });
+
+ describe('Anomaly Chart panel type', () => {
+ beforeEach(done => {
+ panelType.setProps({
+ graphData: anomalyMockGraphData,
+ });
+ panelType.vm.$nextTick(done);
+ });
+
+ it('is rendered with an anomaly chart', () => {
+ expect(panelType.find(AnomalyChart).isVueInstance()).toBe(true);
+ expect(panelType.find(AnomalyChart).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('when downloading metrics data as CSV', () => {
+ beforeEach(done => {
+ graphDataPrometheusQueryRange.y_label = 'metric';
+ store = createStore();
+ panelType = shallowMount(PanelType, {
+ propsData: {
+ clipboardText: exampleText,
+ dashboardWidth,
+ graphData: graphDataPrometheusQueryRange,
+ },
+ store,
+ sync: false,
+ attachToDocument: true,
+ });
+ panelType.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ panelType.destroy();
+ });
+
+ describe('csvText', () => {
+ it('converts metrics data from json to csv', () => {
+ const header = `timestamp,${graphDataPrometheusQueryRange.y_label}`;
+ const data = graphDataPrometheusQueryRange.queries[0].result[0].values;
+ const firstRow = `${data[0][0]},${data[0][1]}`;
+ const secondRow = `${data[1][0]},${data[1][1]}`;
+
+ expect(panelType.vm.csvText).toBe(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
+ });
+ });
+
+ describe('downloadCsv', () => {
+ it('produces a link with a Blob', () => {
+ expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(expect.any(Blob));
+ expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(
+ expect.objectContaining({
+ size: panelType.vm.csvText.length,
+ type: 'text/plain',
+ }),
+ );
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index 1bd74f59282..d4bc613ffea 100644
--- a/spec/javascripts/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -1,8 +1,14 @@
-import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
+import statusCodes from '~/lib/utils/http_status';
+import { backOff } from '~/lib/utils/common_utils';
+
import store from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
import {
+ backOffRequest,
fetchDashboard,
receiveMetricsDashboardSuccess,
receiveMetricsDashboardFailure,
@@ -15,8 +21,6 @@ import {
setGettingStartedEmptyState,
} from '~/monitoring/stores/actions';
import storeState from '~/monitoring/stores/state';
-import testAction from 'spec/helpers/vuex_action_helper';
-import { resetStore } from '../helpers';
import {
deploymentData,
environmentData,
@@ -25,55 +29,108 @@ import {
dashboardGitResponse,
} from '../mock_data';
-describe('Monitoring store actions', () => {
+jest.mock('~/lib/utils/common_utils');
+
+const resetStore = str => {
+ str.replaceState({
+ showEmptyState: true,
+ emptyState: 'loading',
+ groups: [],
+ });
+};
+
+const MAX_REQUESTS = 3;
+
+describe('Monitoring store helpers', () => {
let mock;
+ // Mock underlying `backOff` function to remove in-built delay.
+ backOff.mockImplementation(
+ callback =>
+ new Promise((resolve, reject) => {
+ const stop = arg => (arg instanceof Error ? reject(arg) : resolve(arg));
+ const next = () => callback(next, stop);
+ callback(next, stop);
+ }),
+ );
+
beforeEach(() => {
mock = new MockAdapter(axios);
});
afterEach(() => {
- resetStore(store);
mock.restore();
});
+ describe('backOffRequest', () => {
+ it('returns immediately when recieving a 200 status code', () => {
+ mock.onGet(TEST_HOST).reply(200);
+
+ return backOffRequest(() => axios.get(TEST_HOST)).then(() => {
+ expect(mock.history.get.length).toBe(1);
+ });
+ });
+
+ it(`repeats the network call ${MAX_REQUESTS} times when receiving a 204 response`, done => {
+ mock.onGet(TEST_HOST).reply(statusCodes.NO_CONTENT, {});
+
+ backOffRequest(() => axios.get(TEST_HOST))
+ .then(done.fail)
+ .catch(() => {
+ expect(mock.history.get.length).toBe(MAX_REQUESTS);
+ done();
+ });
+ });
+ });
+});
+
+describe('Monitoring store actions', () => {
+ let mock;
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+ afterEach(() => {
+ resetStore(store);
+ mock.restore();
+ });
describe('requestMetricsData', () => {
it('sets emptyState to loading', () => {
- const commit = jasmine.createSpy();
+ const commit = jest.fn();
const { state } = store;
-
- requestMetricsData({ state, commit });
-
+ requestMetricsData({
+ state,
+ commit,
+ });
expect(commit).toHaveBeenCalledWith(types.REQUEST_METRICS_DATA);
});
});
-
describe('fetchDeploymentsData', () => {
it('commits RECEIVE_DEPLOYMENTS_DATA_SUCCESS on error', done => {
- const dispatch = jasmine.createSpy();
+ const dispatch = jest.fn();
const { state } = store;
state.deploymentsEndpoint = '/success';
-
mock.onGet(state.deploymentsEndpoint).reply(200, {
deployments: deploymentData,
});
-
- fetchDeploymentsData({ state, dispatch })
+ fetchDeploymentsData({
+ state,
+ dispatch,
+ })
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataSuccess', deploymentData);
done();
})
.catch(done.fail);
});
-
it('commits RECEIVE_DEPLOYMENTS_DATA_FAILURE on error', done => {
- const dispatch = jasmine.createSpy();
+ const dispatch = jest.fn();
const { state } = store;
state.deploymentsEndpoint = '/error';
-
mock.onGet(state.deploymentsEndpoint).reply(500);
-
- fetchDeploymentsData({ state, dispatch })
+ fetchDeploymentsData({
+ state,
+ dispatch,
+ })
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataFailure');
done();
@@ -81,33 +138,33 @@ describe('Monitoring store actions', () => {
.catch(done.fail);
});
});
-
describe('fetchEnvironmentsData', () => {
it('commits RECEIVE_ENVIRONMENTS_DATA_SUCCESS on error', done => {
- const dispatch = jasmine.createSpy();
+ const dispatch = jest.fn();
const { state } = store;
state.environmentsEndpoint = '/success';
-
mock.onGet(state.environmentsEndpoint).reply(200, {
environments: environmentData,
});
-
- fetchEnvironmentsData({ state, dispatch })
+ fetchEnvironmentsData({
+ state,
+ dispatch,
+ })
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataSuccess', environmentData);
done();
})
.catch(done.fail);
});
-
it('commits RECEIVE_ENVIRONMENTS_DATA_FAILURE on error', done => {
- const dispatch = jasmine.createSpy();
+ const dispatch = jest.fn();
const { state } = store;
state.environmentsEndpoint = '/error';
-
mock.onGet(state.environmentsEndpoint).reply(500);
-
- fetchEnvironmentsData({ state, dispatch })
+ fetchEnvironmentsData({
+ state,
+ dispatch,
+ })
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataFailure');
done();
@@ -115,14 +172,11 @@ describe('Monitoring store actions', () => {
.catch(done.fail);
});
});
-
describe('Set endpoints', () => {
let mockedState;
-
beforeEach(() => {
mockedState = storeState();
});
-
it('should commit SET_ENDPOINTS mutation', done => {
testAction(
setEndpoints,
@@ -147,42 +201,45 @@ describe('Monitoring store actions', () => {
);
});
});
-
describe('Set empty states', () => {
let mockedState;
-
beforeEach(() => {
mockedState = storeState();
});
-
it('should commit SET_METRICS_ENDPOINT mutation', done => {
testAction(
setGettingStartedEmptyState,
null,
mockedState,
- [{ type: types.SET_GETTING_STARTED_EMPTY_STATE }],
+ [
+ {
+ type: types.SET_GETTING_STARTED_EMPTY_STATE,
+ },
+ ],
[],
done,
);
});
});
-
describe('fetchDashboard', () => {
let dispatch;
let state;
const response = metricsDashboardResponse;
-
beforeEach(() => {
- dispatch = jasmine.createSpy();
+ dispatch = jest.fn();
state = storeState();
state.dashboardEndpoint = '/dashboard';
});
-
it('dispatches receive and success actions', done => {
const params = {};
mock.onGet(state.dashboardEndpoint).reply(200, response);
-
- fetchDashboard({ state, dispatch }, params)
+ fetchDashboard(
+ {
+ state,
+ dispatch,
+ },
+ params,
+ )
.then(() => {
expect(dispatch).toHaveBeenCalledWith('requestMetricsDashboard');
expect(dispatch).toHaveBeenCalledWith('receiveMetricsDashboardSuccess', {
@@ -193,12 +250,16 @@ describe('Monitoring store actions', () => {
})
.catch(done.fail);
});
-
it('dispatches failure action', done => {
const params = {};
mock.onGet(state.dashboardEndpoint).reply(500);
-
- fetchDashboard({ state, dispatch }, params)
+ fetchDashboard(
+ {
+ state,
+ dispatch,
+ },
+ params,
+ )
.then(() => {
expect(dispatch).toHaveBeenCalledWith(
'receiveMetricsDashboardFailure',
@@ -209,77 +270,92 @@ describe('Monitoring store actions', () => {
.catch(done.fail);
});
});
-
describe('receiveMetricsDashboardSuccess', () => {
let commit;
let dispatch;
let state;
-
beforeEach(() => {
- commit = jasmine.createSpy();
- dispatch = jasmine.createSpy();
+ commit = jest.fn();
+ dispatch = jest.fn();
state = storeState();
});
-
it('stores groups ', () => {
const params = {};
const response = metricsDashboardResponse;
-
- receiveMetricsDashboardSuccess({ state, commit, dispatch }, { response, params });
-
+ receiveMetricsDashboardSuccess(
+ {
+ state,
+ commit,
+ dispatch,
+ },
+ {
+ response,
+ params,
+ },
+ );
expect(commit).toHaveBeenCalledWith(
types.RECEIVE_METRICS_DATA_SUCCESS,
metricsDashboardResponse.dashboard.panel_groups,
);
-
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetrics', params);
});
-
it('sets the dashboards loaded from the repository', () => {
const params = {};
const response = metricsDashboardResponse;
-
response.all_dashboards = dashboardGitResponse;
- receiveMetricsDashboardSuccess({ state, commit, dispatch }, { response, params });
-
+ receiveMetricsDashboardSuccess(
+ {
+ state,
+ commit,
+ dispatch,
+ },
+ {
+ response,
+ params,
+ },
+ );
expect(commit).toHaveBeenCalledWith(types.SET_ALL_DASHBOARDS, dashboardGitResponse);
});
});
-
describe('receiveMetricsDashboardFailure', () => {
let commit;
-
beforeEach(() => {
- commit = jasmine.createSpy();
+ commit = jest.fn();
});
-
it('commits failure action', () => {
- receiveMetricsDashboardFailure({ commit });
-
+ receiveMetricsDashboardFailure({
+ commit,
+ });
expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, undefined);
});
-
it('commits failure action with error', () => {
- receiveMetricsDashboardFailure({ commit }, 'uh-oh');
-
+ receiveMetricsDashboardFailure(
+ {
+ commit,
+ },
+ 'uh-oh',
+ );
expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, 'uh-oh');
});
});
-
describe('fetchPrometheusMetrics', () => {
let commit;
let dispatch;
-
beforeEach(() => {
- commit = jasmine.createSpy();
- dispatch = jasmine.createSpy();
+ commit = jest.fn();
+ dispatch = jest.fn();
});
-
it('commits empty state when state.groups is empty', done => {
const state = storeState();
const params = {};
-
- fetchPrometheusMetrics({ state, commit, dispatch }, params)
+ fetchPrometheusMetrics(
+ {
+ state,
+ commit,
+ dispatch,
+ },
+ params,
+ )
.then(() => {
expect(commit).toHaveBeenCalledWith(types.SET_NO_DATA_EMPTY_STATE);
expect(dispatch).not.toHaveBeenCalled();
@@ -287,49 +363,54 @@ describe('Monitoring store actions', () => {
})
.catch(done.fail);
});
-
it('dispatches fetchPrometheusMetric for each panel query', done => {
const params = {};
const state = storeState();
- state.groups = metricsDashboardResponse.dashboard.panel_groups;
-
- const metric = state.groups[0].panels[0].metrics[0];
-
- fetchPrometheusMetrics({ state, commit, dispatch }, params)
+ state.dashboard.panel_groups = metricsDashboardResponse.dashboard.panel_groups;
+ const metric = state.dashboard.panel_groups[0].panels[0].metrics[0];
+ fetchPrometheusMetrics(
+ {
+ state,
+ commit,
+ dispatch,
+ },
+ params,
+ )
.then(() => {
- expect(dispatch.calls.count()).toEqual(3);
- expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', { metric, params });
+ expect(dispatch).toHaveBeenCalledTimes(3);
+ expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
+ metric,
+ params,
+ });
done();
})
.catch(done.fail);
-
done();
});
});
-
describe('fetchPrometheusMetric', () => {
it('commits prometheus query result', done => {
- const commit = jasmine.createSpy();
+ const commit = jest.fn();
const params = {
start: '2019-08-06T12:40:02.184Z',
end: '2019-08-06T20:40:02.184Z',
};
const metric = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics[0];
const state = storeState();
-
- const data = metricsGroupsAPIResponse.data[0].metrics[0].queries[0];
- const response = { data };
+ const data = metricsGroupsAPIResponse[0].panels[0].metrics[0];
+ const response = {
+ data,
+ };
mock.onGet('http://test').reply(200, response);
-
- fetchPrometheusMetric({ state, commit }, { metric, params });
-
- setTimeout(() => {
- expect(commit).toHaveBeenCalledWith(types.SET_QUERY_RESULT, {
- metricId: metric.metric_id,
- result: data.result,
- });
- done();
- });
+ fetchPrometheusMetric({ state, commit }, { metric, params })
+ .then(() => {
+ expect(commit).toHaveBeenCalledWith(types.SET_QUERY_RESULT, {
+ metricId: metric.metric_id,
+ result: data.result,
+ });
+ done();
+ })
+ .catch(done.fail);
});
});
});
diff --git a/spec/javascripts/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index bdddd83358c..fdad290a8d6 100644
--- a/spec/javascripts/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -11,104 +11,62 @@ import { uniqMetricsId } from '~/monitoring/stores/utils';
describe('Monitoring mutations', () => {
let stateCopy;
-
beforeEach(() => {
stateCopy = state();
});
-
- describe(types.RECEIVE_METRICS_DATA_SUCCESS, () => {
+ describe('RECEIVE_METRICS_DATA_SUCCESS', () => {
let groups;
-
beforeEach(() => {
- stateCopy.groups = [];
- groups = metricsGroupsAPIResponse.data;
+ stateCopy.dashboard.panel_groups = [];
+ groups = metricsGroupsAPIResponse;
});
-
- it('normalizes values', () => {
+ it('adds a key to the group', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
-
- const expectedTimestamp = '2017-05-25T08:22:34.925Z';
- const expectedValue = 0.0010794445585559514;
- const [timestamp, value] = stateCopy.groups[0].metrics[0].queries[0].result[0].values[0];
-
- expect(timestamp).toEqual(expectedTimestamp);
- expect(value).toEqual(expectedValue);
+ expect(stateCopy.dashboard.panel_groups[0].key).toBe('system-metrics-kubernetes--0');
});
-
- it('contains two groups that contains, one of which has two queries sorted by priority', () => {
+ it('normalizes values', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
-
- expect(stateCopy.groups).toBeDefined();
- expect(stateCopy.groups.length).toEqual(2);
- expect(stateCopy.groups[0].metrics.length).toEqual(2);
+ const expectedLabel = 'Pod average';
+ const { label, query_range } = stateCopy.dashboard.panel_groups[0].metrics[0].metrics[0];
+ expect(label).toEqual(expectedLabel);
+ expect(query_range.length).toBeGreaterThan(0);
});
-
- it('assigns queries a metric id', () => {
+ it('contains one group, which it has two panels and one metrics property', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
-
- expect(stateCopy.groups[1].metrics[0].queries[0].metricId).toEqual('100');
+ expect(stateCopy.dashboard.panel_groups).toBeDefined();
+ expect(stateCopy.dashboard.panel_groups.length).toEqual(1);
+ expect(stateCopy.dashboard.panel_groups[0].panels.length).toEqual(2);
+ expect(stateCopy.dashboard.panel_groups[0].panels[0].metrics.length).toEqual(1);
+ expect(stateCopy.dashboard.panel_groups[0].panels[1].metrics.length).toEqual(1);
});
-
- it('removes the data if all the values from a query are not defined', () => {
+ it('assigns queries a metric id', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
-
- expect(stateCopy.groups[1].metrics[0].queries[0].result.length).toEqual(0);
- });
-
- it('assigns metric id of null if metric has no id', () => {
- stateCopy.groups = [];
- const noId = groups.map(group => ({
- ...group,
- ...{
- metrics: group.metrics.map(metric => {
- const { id, ...metricWithoutId } = metric;
-
- return metricWithoutId;
- }),
- },
- }));
-
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, noId);
-
- stateCopy.groups.forEach(group => {
- group.metrics.forEach(metric => {
- expect(metric.queries.every(query => query.metricId === null)).toBe(true);
- });
- });
+ expect(stateCopy.dashboard.panel_groups[0].metrics[0].queries[0].metricId).toEqual(
+ '17_system_metrics_kubernetes_container_memory_average',
+ );
});
-
- describe('dashboard endpoint enabled', () => {
+ describe('dashboard endpoint', () => {
const dashboardGroups = metricsDashboardResponse.dashboard.panel_groups;
-
- beforeEach(() => {
- stateCopy.useDashboardEndpoint = true;
- });
-
it('aliases group panels to metrics for backwards compatibility', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
-
- expect(stateCopy.groups[0].metrics[0]).toBeDefined();
+ expect(stateCopy.dashboard.panel_groups[0].metrics[0]).toBeDefined();
});
-
it('aliases panel metrics to queries for backwards compatibility', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
-
- expect(stateCopy.groups[0].metrics[0].queries).toBeDefined();
+ expect(stateCopy.dashboard.panel_groups[0].metrics[0].queries).toBeDefined();
});
});
});
- describe(types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS, () => {
+ describe('RECEIVE_DEPLOYMENTS_DATA_SUCCESS', () => {
it('stores the deployment data', () => {
stateCopy.deploymentData = [];
mutations[types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS](stateCopy, deploymentData);
-
expect(stateCopy.deploymentData).toBeDefined();
expect(stateCopy.deploymentData.length).toEqual(3);
expect(typeof stateCopy.deploymentData[0]).toEqual('object');
});
});
-
describe('SET_ENDPOINTS', () => {
it('should set all the endpoints', () => {
mutations[types.SET_ENDPOINTS](stateCopy, {
@@ -118,7 +76,6 @@ describe('Monitoring mutations', () => {
dashboardEndpoint: 'dashboard.json',
projectPath: '/gitlab-org/gitlab-foss',
});
-
expect(stateCopy.metricsEndpoint).toEqual('additional_metrics.json');
expect(stateCopy.environmentsEndpoint).toEqual('environments.json');
expect(stateCopy.deploymentsEndpoint).toEqual('deployments.json');
@@ -126,51 +83,59 @@ describe('Monitoring mutations', () => {
expect(stateCopy.projectPath).toEqual('/gitlab-org/gitlab-foss');
});
});
-
describe('SET_QUERY_RESULT', () => {
const metricId = 12;
const id = 'system_metrics_kubernetes_container_memory_total';
- const result = [{ values: [[0, 1], [1, 1], [1, 3]] }];
-
+ const result = [
+ {
+ values: [[0, 1], [1, 1], [1, 3]],
+ },
+ ];
beforeEach(() => {
- stateCopy.useDashboardEndpoint = true;
const dashboardGroups = metricsDashboardResponse.dashboard.panel_groups;
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
});
-
it('clears empty state', () => {
mutations[types.SET_QUERY_RESULT](stateCopy, {
metricId,
result,
});
-
expect(stateCopy.showEmptyState).toBe(false);
});
-
it('sets metricsWithData value', () => {
- const uniqId = uniqMetricsId({ metric_id: metricId, id });
+ const uniqId = uniqMetricsId({
+ metric_id: metricId,
+ id,
+ });
mutations[types.SET_QUERY_RESULT](stateCopy, {
metricId: uniqId,
result,
});
-
expect(stateCopy.metricsWithData).toEqual([uniqId]);
});
-
it('does not store empty results', () => {
mutations[types.SET_QUERY_RESULT](stateCopy, {
metricId,
result: [],
});
-
expect(stateCopy.metricsWithData).toEqual([]);
});
});
-
describe('SET_ALL_DASHBOARDS', () => {
- it('stores the dashboards loaded from the git repository', () => {
- mutations[types.SET_ALL_DASHBOARDS](stateCopy, dashboardGitResponse);
+ it('stores `undefined` dashboards as an empty array', () => {
+ mutations[types.SET_ALL_DASHBOARDS](stateCopy, undefined);
+ expect(stateCopy.allDashboards).toEqual([]);
+ });
+
+ it('stores `null` dashboards as an empty array', () => {
+ mutations[types.SET_ALL_DASHBOARDS](stateCopy, null);
+
+ expect(stateCopy.allDashboards).toEqual([]);
+ });
+
+ it('stores dashboards loaded from the git repository', () => {
+ mutations[types.SET_ALL_DASHBOARDS](stateCopy, dashboardGitResponse);
expect(stateCopy.allDashboards).toEqual(dashboardGitResponse);
});
});
diff --git a/spec/javascripts/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index 98388ac19f8..98388ac19f8 100644
--- a/spec/javascripts/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
new file mode 100644
index 00000000000..45b99b71e06
--- /dev/null
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -0,0 +1,331 @@
+import $ from 'jquery';
+import { mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import Autosize from 'autosize';
+import axios from '~/lib/utils/axios_utils';
+import createStore from '~/notes/stores';
+import CommentForm from '~/notes/components/comment_form.vue';
+import * as constants from '~/notes/constants';
+import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
+import { trimText } from 'helpers/text_helper';
+import { keyboardDownEvent } from '../../issue_show/helpers';
+import {
+ loggedOutnoteableData,
+ notesDataMock,
+ userDataMock,
+ noteableDataMock,
+} from '../../notes/mock_data';
+
+jest.mock('autosize');
+jest.mock('~/commons/nav/user_merge_requests');
+jest.mock('~/gl_form');
+
+describe('issue_comment_form component', () => {
+ let store;
+ let wrapper;
+ let axiosMock;
+
+ const setupStore = (userData, noteableData) => {
+ store.dispatch('setUserData', userData);
+ store.dispatch('setNoteableData', noteableData);
+ store.dispatch('setNotesData', notesDataMock);
+ };
+
+ const mountComponent = (noteableType = 'issue') => {
+ wrapper = mount(CommentForm, {
+ propsData: {
+ noteableType,
+ },
+ store,
+ sync: false,
+ });
+ };
+
+ beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
+ store = createStore();
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ wrapper.destroy();
+ jest.clearAllMocks();
+ });
+
+ describe('user is logged in', () => {
+ beforeEach(() => {
+ setupStore(userDataMock, noteableDataMock);
+
+ mountComponent();
+ });
+
+ it('should render user avatar with link', () => {
+ expect(wrapper.find('.timeline-icon .user-avatar-link').attributes('href')).toEqual(
+ userDataMock.path,
+ );
+ });
+
+ describe('handleSave', () => {
+ it('should request to save note when note is entered', () => {
+ wrapper.vm.note = 'hello world';
+ jest.spyOn(wrapper.vm, 'saveNote').mockReturnValue(new Promise(() => {}));
+ jest.spyOn(wrapper.vm, 'resizeTextarea');
+ jest.spyOn(wrapper.vm, 'stopPolling');
+
+ wrapper.vm.handleSave();
+
+ expect(wrapper.vm.isSubmitting).toEqual(true);
+ expect(wrapper.vm.note).toEqual('');
+ expect(wrapper.vm.saveNote).toHaveBeenCalled();
+ expect(wrapper.vm.stopPolling).toHaveBeenCalled();
+ expect(wrapper.vm.resizeTextarea).toHaveBeenCalled();
+ });
+
+ it('should toggle issue state when no note', () => {
+ jest.spyOn(wrapper.vm, 'toggleIssueState');
+
+ wrapper.vm.handleSave();
+
+ expect(wrapper.vm.toggleIssueState).toHaveBeenCalled();
+ });
+
+ it('should disable action button whilst submitting', done => {
+ const saveNotePromise = Promise.resolve();
+ wrapper.vm.note = 'hello world';
+ jest.spyOn(wrapper.vm, 'saveNote').mockReturnValue(saveNotePromise);
+ jest.spyOn(wrapper.vm, 'stopPolling');
+
+ const actionButton = wrapper.find('.js-action-button');
+
+ wrapper.vm.handleSave();
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(actionButton.vm.disabled).toBeTruthy();
+ })
+ .then(saveNotePromise)
+ .then(wrapper.vm.$nextTick)
+ .then(() => {
+ expect(actionButton.vm.disabled).toBeFalsy();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('textarea', () => {
+ it('should render textarea with placeholder', () => {
+ expect(wrapper.find('.js-main-target-form textarea').attributes('placeholder')).toEqual(
+ 'Write a comment or drag your files here…',
+ );
+ });
+
+ it('should make textarea disabled while requesting', done => {
+ const $submitButton = $(wrapper.find('.js-comment-submit-button').element);
+ wrapper.vm.note = 'hello world';
+ jest.spyOn(wrapper.vm, 'stopPolling');
+ jest.spyOn(wrapper.vm, 'saveNote').mockReturnValue(new Promise(() => {}));
+
+ wrapper.vm.$nextTick(() => {
+ // Wait for wrapper.vm.note change triggered. It should enable $submitButton.
+ $submitButton.trigger('click');
+
+ wrapper.vm.$nextTick(() => {
+ // Wait for wrapper.isSubmitting triggered. It should disable textarea.
+ expect(wrapper.find('.js-main-target-form textarea').attributes('disabled')).toBe(
+ 'disabled',
+ );
+ done();
+ });
+ });
+ });
+
+ it('should support quick actions', () => {
+ expect(
+ wrapper.find('.js-main-target-form textarea').attributes('data-supports-quick-actions'),
+ ).toBe('true');
+ });
+
+ it('should link to markdown docs', () => {
+ const { markdownDocsPath } = notesDataMock;
+
+ expect(
+ wrapper
+ .find(`a[href="${markdownDocsPath}"]`)
+ .text()
+ .trim(),
+ ).toEqual('Markdown');
+ });
+
+ it('should link to quick actions docs', () => {
+ const { quickActionsDocsPath } = notesDataMock;
+
+ expect(
+ wrapper
+ .find(`a[href="${quickActionsDocsPath}"]`)
+ .text()
+ .trim(),
+ ).toEqual('quick actions');
+ });
+
+ it('should resize textarea after note discarded', done => {
+ jest.spyOn(wrapper.vm, 'discard');
+
+ wrapper.vm.note = 'foo';
+ wrapper.vm.discard();
+
+ wrapper.vm.$nextTick(() => {
+ expect(Autosize.update).toHaveBeenCalled();
+ done();
+ });
+ });
+
+ describe('edit mode', () => {
+ it('should enter edit mode when arrow up is pressed', () => {
+ jest.spyOn(wrapper.vm, 'editCurrentUserLastNote');
+ wrapper.find('.js-main-target-form textarea').value = 'Foo';
+ wrapper
+ .find('.js-main-target-form textarea')
+ .element.dispatchEvent(keyboardDownEvent(38, true));
+
+ expect(wrapper.vm.editCurrentUserLastNote).toHaveBeenCalled();
+ });
+
+ it('inits autosave', () => {
+ expect(wrapper.vm.autosave).toBeDefined();
+ expect(wrapper.vm.autosave.key).toEqual(`autosave/Note/Issue/${noteableDataMock.id}`);
+ });
+ });
+
+ describe('event enter', () => {
+ it('should save note when cmd+enter is pressed', () => {
+ jest.spyOn(wrapper.vm, 'handleSave');
+ wrapper.find('.js-main-target-form textarea').value = 'Foo';
+ wrapper
+ .find('.js-main-target-form textarea')
+ .element.dispatchEvent(keyboardDownEvent(13, true));
+
+ expect(wrapper.vm.handleSave).toHaveBeenCalled();
+ });
+
+ it('should save note when ctrl+enter is pressed', () => {
+ jest.spyOn(wrapper.vm, 'handleSave');
+ wrapper.find('.js-main-target-form textarea').value = 'Foo';
+ wrapper
+ .find('.js-main-target-form textarea')
+ .element.dispatchEvent(keyboardDownEvent(13, false, true));
+
+ expect(wrapper.vm.handleSave).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('actions', () => {
+ it('should be possible to close the issue', () => {
+ expect(
+ wrapper
+ .find('.btn-comment-and-close')
+ .text()
+ .trim(),
+ ).toEqual('Close issue');
+ });
+
+ it('should render comment button as disabled', () => {
+ expect(wrapper.find('.js-comment-submit-button').attributes('disabled')).toEqual(
+ 'disabled',
+ );
+ });
+
+ it('should enable comment button if it has note', done => {
+ wrapper.vm.note = 'Foo';
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.js-comment-submit-button').attributes('disabled')).toBeFalsy();
+ done();
+ });
+ });
+
+ it('should update buttons texts when it has note', done => {
+ wrapper.vm.note = 'Foo';
+ wrapper.vm.$nextTick(() => {
+ expect(
+ wrapper
+ .find('.btn-comment-and-close')
+ .text()
+ .trim(),
+ ).toEqual('Comment & close issue');
+
+ done();
+ });
+ });
+
+ it('updates button text with noteable type', done => {
+ wrapper.setProps({ noteableType: constants.MERGE_REQUEST_NOTEABLE_TYPE });
+
+ wrapper.vm.$nextTick(() => {
+ expect(
+ wrapper
+ .find('.btn-comment-and-close')
+ .text()
+ .trim(),
+ ).toEqual('Close merge request');
+ done();
+ });
+ });
+
+ describe('when clicking close/reopen button', () => {
+ it('should disable button and show a loading spinner', done => {
+ const toggleStateButton = wrapper.find('.js-action-button');
+
+ toggleStateButton.trigger('click');
+ wrapper.vm.$nextTick(() => {
+ expect(toggleStateButton.element.disabled).toEqual(true);
+ expect(toggleStateButton.find('.js-loading-button-icon').exists()).toBe(true);
+
+ done();
+ });
+ });
+ });
+
+ describe('when toggling state', () => {
+ it('should update MR count', done => {
+ jest.spyOn(wrapper.vm, 'closeIssue').mockResolvedValue();
+
+ wrapper.vm.toggleIssueState();
+
+ wrapper.vm.$nextTick(() => {
+ expect(refreshUserMergeRequestCounts).toHaveBeenCalled();
+
+ done();
+ });
+ });
+ });
+ });
+
+ describe('issue is confidential', () => {
+ it('shows information warning', done => {
+ store.dispatch('setNoteableData', Object.assign(noteableDataMock, { confidential: true }));
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.confidential-issue-warning')).toBeDefined();
+ done();
+ });
+ });
+ });
+ });
+
+ describe('user is not logged in', () => {
+ beforeEach(() => {
+ setupStore(null, loggedOutnoteableData);
+
+ mountComponent();
+ });
+
+ it('should render signed out widget', () => {
+ expect(trimText(wrapper.text())).toEqual('Please register or sign in to reply');
+ });
+
+ it('should not render submission form', () => {
+ expect(wrapper.find('textarea').exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/notes/components/diff_discussion_header_spec.js b/spec/frontend/notes/components/diff_discussion_header_spec.js
new file mode 100644
index 00000000000..f90147f9105
--- /dev/null
+++ b/spec/frontend/notes/components/diff_discussion_header_spec.js
@@ -0,0 +1,141 @@
+import { mount, createLocalVue } from '@vue/test-utils';
+
+import createStore from '~/notes/stores';
+import diffDiscussionHeader from '~/notes/components/diff_discussion_header.vue';
+
+import { discussionMock } from '../../../javascripts/notes/mock_data';
+import mockDiffFile from '../../diffs/mock_data/diff_discussions';
+
+const discussionWithTwoUnresolvedNotes = 'merge_requests/resolved_diff_discussion.json';
+
+describe('diff_discussion_header component', () => {
+ let store;
+ let wrapper;
+
+ preloadFixtures(discussionWithTwoUnresolvedNotes);
+
+ beforeEach(() => {
+ window.mrTabs = {};
+ store = createStore();
+
+ const localVue = createLocalVue();
+ wrapper = mount(diffDiscussionHeader, {
+ store,
+ propsData: { discussion: discussionMock },
+ localVue,
+ sync: false,
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render user avatar', () => {
+ const discussion = { ...discussionMock };
+ discussion.diff_file = mockDiffFile;
+ discussion.diff_discussion = true;
+
+ wrapper.setProps({ discussion });
+
+ expect(wrapper.find('.user-avatar-link').exists()).toBe(true);
+ });
+
+ describe('action text', () => {
+ const commitId = 'razupaltuff';
+ const truncatedCommitId = commitId.substr(0, 8);
+ let commitElement;
+
+ beforeEach(done => {
+ store.state.diffs = {
+ projectPath: 'something',
+ };
+
+ wrapper.setProps({
+ discussion: {
+ ...discussionMock,
+ for_commit: true,
+ commit_id: commitId,
+ diff_discussion: true,
+ diff_file: {
+ ...mockDiffFile,
+ },
+ },
+ });
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ commitElement = wrapper.find('.commit-sha');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ describe('for diff threads without a commit id', () => {
+ it('should show started a thread on the diff text', done => {
+ Object.assign(wrapper.vm.discussion, {
+ for_commit: false,
+ commit_id: null,
+ });
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.text()).toContain('started a thread on the diff');
+
+ done();
+ });
+ });
+
+ it('should show thread on older version text', done => {
+ Object.assign(wrapper.vm.discussion, {
+ for_commit: false,
+ commit_id: null,
+ active: false,
+ });
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.text()).toContain('started a thread on an old version of the diff');
+
+ done();
+ });
+ });
+ });
+
+ describe('for commit threads', () => {
+ it('should display a monospace started a thread on commit', () => {
+ expect(wrapper.text()).toContain(`started a thread on commit ${truncatedCommitId}`);
+ expect(commitElement.exists()).toBe(true);
+ expect(commitElement.text()).toContain(truncatedCommitId);
+ });
+ });
+
+ describe('for diff thread with a commit id', () => {
+ it('should display started thread on commit header', done => {
+ wrapper.vm.discussion.for_commit = false;
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.text()).toContain(`started a thread on commit ${truncatedCommitId}`);
+
+ expect(commitElement).not.toBe(null);
+
+ done();
+ });
+ });
+
+ it('should display outdated change on commit header', done => {
+ wrapper.vm.discussion.for_commit = false;
+ wrapper.vm.discussion.active = false;
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.text()).toContain(
+ `started a thread on an outdated change in commit ${truncatedCommitId}`,
+ );
+
+ expect(commitElement).not.toBe(null);
+
+ done();
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/notes/components/discussion_actions_spec.js b/spec/frontend/notes/components/discussion_actions_spec.js
index d3c8cf72376..91f9dab2530 100644
--- a/spec/frontend/notes/components/discussion_actions_spec.js
+++ b/spec/frontend/notes/components/discussion_actions_spec.js
@@ -1,6 +1,6 @@
import createStore from '~/notes/stores';
import { shallowMount, mount, createLocalVue } from '@vue/test-utils';
-import { discussionMock } from '../../../javascripts/notes/mock_data';
+import { discussionMock } from '../../notes/mock_data';
import DiscussionActions from '~/notes/components/discussion_actions.vue';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
import ResolveDiscussionButton from '~/notes/components/discussion_resolve_button.vue';
diff --git a/spec/frontend/notes/components/discussion_notes_spec.js b/spec/frontend/notes/components/discussion_notes_spec.js
index 58d367077e8..f77236b14bc 100644
--- a/spec/frontend/notes/components/discussion_notes_spec.js
+++ b/spec/frontend/notes/components/discussion_notes_spec.js
@@ -8,11 +8,7 @@ import PlaceholderSystemNote from '~/vue_shared/components/notes/placeholder_sys
import SystemNote from '~/vue_shared/components/notes/system_note.vue';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
import createStore from '~/notes/stores';
-import {
- noteableDataMock,
- discussionMock,
- notesDataMock,
-} from '../../../javascripts/notes/mock_data';
+import { noteableDataMock, discussionMock, notesDataMock } from '../../notes/mock_data';
const localVue = createLocalVue();
diff --git a/spec/frontend/notes/components/note_app_spec.js b/spec/frontend/notes/components/note_app_spec.js
index a8ec47fd44f..3716b349210 100644
--- a/spec/frontend/notes/components/note_app_spec.js
+++ b/spec/frontend/notes/components/note_app_spec.js
@@ -9,7 +9,8 @@ import createStore from '~/notes/stores';
import '~/behaviors/markdown/render_gfm';
import { setTestTimeout } from 'helpers/timeout';
// TODO: use generated fixture (https://gitlab.com/gitlab-org/gitlab-foss/issues/62491)
-import * as mockData from '../../../javascripts/notes/mock_data';
+import * as mockData from '../../notes/mock_data';
+import * as urlUtility from '~/lib/utils/url_utility';
setTestTimeout(1000);
@@ -54,7 +55,9 @@ describe('note_app', () => {
components: {
NotesApp,
},
- template: '<div class="js-vue-notes-event"><notes-app v-bind="$attrs" /></div>',
+ template: `<div class="js-vue-notes-event">
+ <notes-app ref="notesApp" v-bind="$attrs" />
+ </div>`,
},
{
attachToDocument: true,
@@ -313,4 +316,23 @@ describe('note_app', () => {
});
});
});
+
+ describe('mounted', () => {
+ beforeEach(() => {
+ axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
+ wrapper = mountComponent();
+ return waitForDiscussionsRequest();
+ });
+
+ it('should listen hashchange event', () => {
+ const notesApp = wrapper.find(NotesApp);
+ const hash = 'some dummy hash';
+ jest.spyOn(urlUtility, 'getLocationHash').mockReturnValueOnce(hash);
+ const setTargetNoteHash = jest.spyOn(notesApp.vm, 'setTargetNoteHash');
+
+ window.dispatchEvent(new Event('hashchange'), hash);
+
+ expect(setTargetNoteHash).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/notes/mock_data.js b/spec/frontend/notes/mock_data.js
new file mode 100644
index 00000000000..01cb70d395c
--- /dev/null
+++ b/spec/frontend/notes/mock_data.js
@@ -0,0 +1,1255 @@
+// Copied to ee/spec/frontend/notes/mock_data.js
+
+export const notesDataMock = {
+ discussionsPath: '/gitlab-org/gitlab-foss/issues/26/discussions.json',
+ lastFetchedAt: 1501862675,
+ markdownDocsPath: '/help/user/markdown',
+ newSessionPath: '/users/sign_in?redirect_to_referer=yes',
+ notesPath: '/gitlab-org/gitlab-foss/noteable/issue/98/notes',
+ quickActionsDocsPath: '/help/user/project/quick_actions',
+ registerPath: '/users/sign_in?redirect_to_referer=yes#register-pane',
+ prerenderedNotesCount: 1,
+ closePath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=close',
+ reopenPath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=reopen',
+ canAwardEmoji: true,
+};
+
+export const userDataMock = {
+ avatar_url: 'mock_path',
+ id: 1,
+ name: 'Root',
+ path: '/root',
+ state: 'active',
+ username: 'root',
+};
+
+export const noteableDataMock = {
+ assignees: [],
+ author_id: 1,
+ branch_name: null,
+ confidential: false,
+ create_note_path: '/gitlab-org/gitlab-foss/notes?target_id=98&target_type=issue',
+ created_at: '2017-02-07T10:11:18.395Z',
+ current_user: {
+ can_create_note: true,
+ can_update: true,
+ can_award_emoji: true,
+ },
+ description: '',
+ due_date: null,
+ human_time_estimate: null,
+ human_total_time_spent: null,
+ id: 98,
+ iid: 26,
+ labels: [],
+ lock_version: null,
+ milestone: null,
+ milestone_id: null,
+ moved_to_id: null,
+ preview_note_path: '/gitlab-org/gitlab-foss/preview_markdown?target_id=98&target_type=Issue',
+ project_id: 2,
+ state: 'opened',
+ time_estimate: 0,
+ title: '14',
+ total_time_spent: 0,
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ updated_at: '2017-08-04T09:53:01.226Z',
+ updated_by_id: 1,
+ web_url: '/gitlab-org/gitlab-foss/issues/26',
+ noteableType: 'issue',
+};
+
+export const lastFetchedAt = '1501862675';
+
+export const individualNote = {
+ expanded: true,
+ id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
+ individual_note: true,
+ notes: [
+ {
+ id: '1390',
+ attachment: {
+ url: null,
+ filename: null,
+ image: false,
+ },
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: 'test',
+ path: '/root',
+ },
+ created_at: '2017-08-01T17: 09: 33.762Z',
+ updated_at: '2017-08-01T17: 09: 33.762Z',
+ system: false,
+ noteable_id: 98,
+ noteable_type: 'Issue',
+ type: null,
+ human_access: 'Owner',
+ note: 'sdfdsaf',
+ note_html: "<p dir='auto'>sdfdsaf</p>",
+ current_user: {
+ can_edit: true,
+ can_award_emoji: true,
+ },
+ discussion_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
+ emoji_awardable: true,
+ award_emoji: [
+ { name: 'baseball', user: { id: 1, name: 'Root', username: 'root' } },
+ { name: 'art', user: { id: 1, name: 'Root', username: 'root' } },
+ ],
+ toggle_award_path: '/gitlab-org/gitlab-foss/notes/1390/toggle_award_emoji',
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ note_url: '/group/project/merge_requests/1#note_1',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1390&user_id=1',
+ path: '/gitlab-org/gitlab-foss/notes/1390',
+ },
+ ],
+ reply_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
+};
+
+export const note = {
+ id: '546',
+ attachment: {
+ url: null,
+ filename: null,
+ image: false,
+ },
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2017-08-10T15:24:03.087Z',
+ updated_at: '2017-08-10T15:24:03.087Z',
+ system: false,
+ noteable_id: 67,
+ noteable_type: 'Issue',
+ noteable_iid: 7,
+ type: null,
+ human_access: 'Owner',
+ note: 'Vel id placeat reprehenderit sit numquam.',
+ note_html: '<p dir="auto">Vel id placeat reprehenderit sit numquam.</p>',
+ current_user: {
+ can_edit: true,
+ can_award_emoji: true,
+ },
+ discussion_id: 'd3842a451b7f3d9a5dfce329515127b2d29a4cd0',
+ emoji_awardable: true,
+ award_emoji: [
+ {
+ name: 'baseball',
+ user: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ },
+ },
+ {
+ name: 'bath_tone3',
+ user: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ },
+ },
+ ],
+ toggle_award_path: '/gitlab-org/gitlab-foss/notes/546/toggle_award_emoji',
+ note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_546&user_id=1',
+ path: '/gitlab-org/gitlab-foss/notes/546',
+};
+
+export const discussionMock = {
+ id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
+ reply_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
+ expanded: true,
+ notes: [
+ {
+ id: '1395',
+ attachment: {
+ url: null,
+ filename: null,
+ image: false,
+ },
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ path: '/root',
+ },
+ created_at: '2017-08-02T10:51:58.559Z',
+ updated_at: '2017-08-02T10:51:58.559Z',
+ system: false,
+ noteable_id: 98,
+ noteable_type: 'Issue',
+ type: 'DiscussionNote',
+ human_access: 'Owner',
+ note: 'THIS IS A DICUSSSION!',
+ note_html: "<p dir='auto'>THIS IS A DICUSSSION!</p>",
+ current_user: {
+ can_edit: true,
+ can_award_emoji: true,
+ can_resolve: true,
+ },
+ discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
+ emoji_awardable: true,
+ award_emoji: [],
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ toggle_award_path: '/gitlab-org/gitlab-foss/notes/1395/toggle_award_emoji',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1395&user_id=1',
+ path: '/gitlab-org/gitlab-foss/notes/1395',
+ },
+ {
+ id: '1396',
+ attachment: {
+ url: null,
+ filename: null,
+ image: false,
+ },
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ path: '/root',
+ },
+ created_at: '2017-08-02T10:56:50.980Z',
+ updated_at: '2017-08-03T14:19:35.691Z',
+ system: false,
+ noteable_id: 98,
+ noteable_type: 'Issue',
+ type: 'DiscussionNote',
+ human_access: 'Owner',
+ note: 'sadfasdsdgdsf',
+ note_html: "<p dir='auto'>sadfasdsdgdsf</p>",
+ last_edited_at: '2017-08-03T14:19:35.691Z',
+ last_edited_by: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ path: '/root',
+ },
+ current_user: {
+ can_edit: true,
+ can_award_emoji: true,
+ can_resolve: true,
+ },
+ discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
+ emoji_awardable: true,
+ award_emoji: [],
+ toggle_award_path: '/gitlab-org/gitlab-foss/notes/1396/toggle_award_emoji',
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1396&user_id=1',
+ path: '/gitlab-org/gitlab-foss/notes/1396',
+ },
+ {
+ id: '1437',
+ attachment: {
+ url: null,
+ filename: null,
+ image: false,
+ },
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ path: '/root',
+ },
+ created_at: '2017-08-03T18:11:18.780Z',
+ updated_at: '2017-08-04T09:52:31.062Z',
+ system: false,
+ noteable_id: 98,
+ noteable_type: 'Issue',
+ type: 'DiscussionNote',
+ human_access: 'Owner',
+ note: 'adsfasf Should disappear',
+ note_html: "<p dir='auto'>adsfasf Should disappear</p>",
+ last_edited_at: '2017-08-04T09:52:31.062Z',
+ last_edited_by: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ path: '/root',
+ },
+ current_user: {
+ can_edit: true,
+ can_award_emoji: true,
+ can_resolve: true,
+ },
+ discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
+ emoji_awardable: true,
+ award_emoji: [],
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ toggle_award_path: '/gitlab-org/gitlab-foss/notes/1437/toggle_award_emoji',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1437&user_id=1',
+ path: '/gitlab-org/gitlab-foss/notes/1437',
+ },
+ ],
+ individual_note: false,
+ resolvable: true,
+ active: true,
+};
+
+export const loggedOutnoteableData = {
+ id: '98',
+ iid: 26,
+ author_id: 1,
+ description: '',
+ lock_version: 1,
+ milestone_id: null,
+ state: 'opened',
+ title: 'asdsa',
+ updated_by_id: 1,
+ created_at: '2017-02-07T10:11:18.395Z',
+ updated_at: '2017-08-08T10:22:51.564Z',
+ time_estimate: 0,
+ total_time_spent: 0,
+ human_time_estimate: null,
+ human_total_time_spent: null,
+ milestone: null,
+ labels: [],
+ branch_name: null,
+ confidential: false,
+ assignees: [
+ {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ web_url: 'http://localhost:3000/root',
+ },
+ ],
+ due_date: null,
+ moved_to_id: null,
+ project_id: 2,
+ web_url: '/gitlab-org/gitlab-foss/issues/26',
+ current_user: {
+ can_create_note: false,
+ can_update: false,
+ },
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ create_note_path: '/gitlab-org/gitlab-foss/notes?target_id=98&target_type=issue',
+ preview_note_path: '/gitlab-org/gitlab-foss/preview_markdown?target_id=98&target_type=Issue',
+};
+
+export const collapseNotesMock = [
+ {
+ expanded: true,
+ id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
+ individual_note: true,
+ notes: [
+ {
+ id: '1390',
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: 'test',
+ path: '/root',
+ },
+ created_at: '2018-02-26T18:07:41.071Z',
+ updated_at: '2018-02-26T18:07:41.071Z',
+ system: true,
+ system_note_icon_name: 'pencil',
+ noteable_id: 98,
+ noteable_type: 'Issue',
+ type: null,
+ human_access: 'Owner',
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false },
+ discussion_id: 'b97fb7bda470a65b3e009377a9032edec0a4dd05',
+ emoji_awardable: false,
+ path: '/h5bp/html5-boilerplate/notes/1057',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fh5bp%2Fhtml5-boilerplate%2Fissues%2F10%23note_1057&user_id=1',
+ },
+ ],
+ },
+ {
+ expanded: true,
+ id: 'ffde43f25984ad7f2b4275135e0e2846875336c0',
+ individual_note: true,
+ notes: [
+ {
+ id: '1391',
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: 'test',
+ path: '/root',
+ },
+ created_at: '2018-02-26T18:13:24.071Z',
+ updated_at: '2018-02-26T18:13:24.071Z',
+ system: true,
+ system_note_icon_name: 'pencil',
+ noteable_id: 99,
+ noteable_type: 'Issue',
+ type: null,
+ human_access: 'Owner',
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false },
+ discussion_id: '3eb958b4d81dec207ec3537a2f3bd8b9f271bb34',
+ emoji_awardable: false,
+ path: '/h5bp/html5-boilerplate/notes/1057',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fh5bp%2Fhtml5-boilerplate%2Fissues%2F10%23note_1057&user_id=1',
+ },
+ ],
+ },
+];
+
+export const INDIVIDUAL_NOTE_RESPONSE_MAP = {
+ GET: {
+ '/gitlab-org/gitlab-foss/issues/26/discussions.json': [
+ {
+ id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
+ reply_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
+ expanded: true,
+ notes: [
+ {
+ id: '1390',
+ attachment: {
+ url: null,
+ filename: null,
+ image: false,
+ },
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ path: '/root',
+ },
+ created_at: '2017-08-01T17:09:33.762Z',
+ updated_at: '2017-08-01T17:09:33.762Z',
+ system: false,
+ noteable_id: 98,
+ noteable_type: 'Issue',
+ type: null,
+ human_access: 'Owner',
+ note: 'sdfdsaf',
+ note_html: '\u003cp dir="auto"\u003esdfdsaf\u003c/p\u003e',
+ current_user: {
+ can_edit: true,
+ can_award_emoji: true,
+ },
+ discussion_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
+ emoji_awardable: true,
+ award_emoji: [
+ {
+ name: 'baseball',
+ user: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ },
+ },
+ {
+ name: 'art',
+ user: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ },
+ },
+ ],
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ toggle_award_path: '/gitlab-org/gitlab-foss/notes/1390/toggle_award_emoji',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1390\u0026user_id=1',
+ path: '/gitlab-org/gitlab-foss/notes/1390',
+ },
+ ],
+ individual_note: true,
+ },
+ {
+ id: '70d5c92a4039a36c70100c6691c18c27e4b0a790',
+ reply_id: '70d5c92a4039a36c70100c6691c18c27e4b0a790',
+ expanded: true,
+ notes: [
+ {
+ id: '1391',
+ attachment: {
+ url: null,
+ filename: null,
+ image: false,
+ },
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ path: '/root',
+ },
+ created_at: '2017-08-02T10:51:38.685Z',
+ updated_at: '2017-08-02T10:51:38.685Z',
+ system: false,
+ noteable_id: 98,
+ noteable_type: 'Issue',
+ type: null,
+ human_access: 'Owner',
+ note: 'New note!',
+ note_html: '\u003cp dir="auto"\u003eNew note!\u003c/p\u003e',
+ current_user: {
+ can_edit: true,
+ can_award_emoji: true,
+ },
+ discussion_id: '70d5c92a4039a36c70100c6691c18c27e4b0a790',
+ emoji_awardable: true,
+ award_emoji: [],
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ toggle_award_path: '/gitlab-org/gitlab-foss/notes/1391/toggle_award_emoji',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1391\u0026user_id=1',
+ path: '/gitlab-org/gitlab-foss/notes/1391',
+ },
+ ],
+ individual_note: true,
+ },
+ ],
+ '/gitlab-org/gitlab-foss/noteable/issue/98/notes': {
+ last_fetched_at: 1512900838,
+ notes: [],
+ },
+ },
+ PUT: {
+ '/gitlab-org/gitlab-foss/notes/1471': {
+ commands_changes: null,
+ valid: true,
+ id: '1471',
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ path: '/root',
+ },
+ created_at: '2017-08-08T16:53:00.666Z',
+ updated_at: '2017-12-10T11:03:21.876Z',
+ system: false,
+ noteable_id: 124,
+ noteable_type: 'Issue',
+ noteable_iid: 29,
+ type: 'DiscussionNote',
+ human_access: 'Owner',
+ note: 'Adding a comment',
+ note_html: '\u003cp dir="auto"\u003eAdding a comment\u003c/p\u003e',
+ last_edited_at: '2017-12-10T11:03:21.876Z',
+ last_edited_by: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ path: '/root',
+ },
+ current_user: {
+ can_edit: true,
+ can_award_emoji: true,
+ },
+ discussion_id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052',
+ emoji_awardable: true,
+ award_emoji: [],
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ toggle_award_path: '/gitlab-org/gitlab-foss/notes/1471/toggle_award_emoji',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F29%23note_1471\u0026user_id=1',
+ path: '/gitlab-org/gitlab-foss/notes/1471',
+ },
+ },
+};
+
+export const DISCUSSION_NOTE_RESPONSE_MAP = {
+ ...INDIVIDUAL_NOTE_RESPONSE_MAP,
+ GET: {
+ ...INDIVIDUAL_NOTE_RESPONSE_MAP.GET,
+ '/gitlab-org/gitlab-foss/issues/26/discussions.json': [
+ {
+ id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052',
+ reply_id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052',
+ expanded: true,
+ notes: [
+ {
+ id: '1471',
+ attachment: {
+ url: null,
+ filename: null,
+ image: false,
+ },
+ author: {
+ id: 1,
+ name: 'Root',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ path: '/root',
+ },
+ created_at: '2017-08-08T16:53:00.666Z',
+ updated_at: '2017-08-08T16:53:00.666Z',
+ system: false,
+ noteable_id: 124,
+ noteable_type: 'Issue',
+ noteable_iid: 29,
+ type: 'DiscussionNote',
+ human_access: 'Owner',
+ note: 'Adding a comment',
+ note_html: '\u003cp dir="auto"\u003eAdding a comment\u003c/p\u003e',
+ current_user: {
+ can_edit: true,
+ can_award_emoji: true,
+ },
+ discussion_id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052',
+ emoji_awardable: true,
+ award_emoji: [],
+ toggle_award_path: '/gitlab-org/gitlab-foss/notes/1471/toggle_award_emoji',
+ noteable_note_url: '/group/project/merge_requests/1#note_1',
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F29%23note_1471\u0026user_id=1',
+ path: '/gitlab-org/gitlab-foss/notes/1471',
+ },
+ ],
+ individual_note: false,
+ },
+ ],
+ },
+};
+
+export function getIndividualNoteResponse(config) {
+ return [200, INDIVIDUAL_NOTE_RESPONSE_MAP[config.method.toUpperCase()][config.url]];
+}
+
+export function getDiscussionNoteResponse(config) {
+ return [200, DISCUSSION_NOTE_RESPONSE_MAP[config.method.toUpperCase()][config.url]];
+}
+
+export const notesWithDescriptionChanges = [
+ {
+ id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ reply_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ expanded: true,
+ notes: [
+ {
+ id: '901',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:05:36.117Z',
+ updated_at: '2018-05-29T12:05:36.117Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note:
+ 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
+ note_html:
+ '<p dir="auto">Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_901&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/901/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/901',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ reply_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ expanded: true,
+ notes: [
+ {
+ id: '902',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:05:58.694Z',
+ updated_at: '2018-05-29T12:05:58.694Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note:
+ 'Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.',
+ note_html:
+ '<p dir="auto">Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_902&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/902/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/902',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '7f1feda384083eb31763366e6392399fde6f3f31',
+ reply_id: '7f1feda384083eb31763366e6392399fde6f3f31',
+ expanded: true,
+ notes: [
+ {
+ id: '903',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:06:05.772Z',
+ updated_at: '2018-05-29T12:06:05.772Z',
+ system: true,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ system_note_icon_name: 'pencil-square',
+ discussion_id: '7f1feda384083eb31763366e6392399fde6f3f31',
+ emoji_awardable: false,
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_903&user_id=1',
+ human_access: 'Owner',
+ path: '/gitlab-org/gitlab-shell/notes/903',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ reply_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ expanded: true,
+ notes: [
+ {
+ id: '904',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:06:16.112Z',
+ updated_at: '2018-05-29T12:06:16.112Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'Ullamcorper eget nulla facilisi etiam',
+ note_html: '<p dir="auto">Ullamcorper eget nulla facilisi etiam</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_904&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/904/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/904',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ reply_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ expanded: true,
+ notes: [
+ {
+ id: '905',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:06:28.851Z',
+ updated_at: '2018-05-29T12:06:28.851Z',
+ system: true,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ system_note_icon_name: 'pencil-square',
+ discussion_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ emoji_awardable: false,
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_905&user_id=1',
+ human_access: 'Owner',
+ path: '/gitlab-org/gitlab-shell/notes/905',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ reply_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ expanded: true,
+ notes: [
+ {
+ id: '906',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:20:02.925Z',
+ updated_at: '2018-05-29T12:20:02.925Z',
+ system: true,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ system_note_icon_name: 'pencil-square',
+ discussion_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ emoji_awardable: false,
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_906&user_id=1',
+ human_access: 'Owner',
+ path: '/gitlab-org/gitlab-shell/notes/906',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+];
+
+export const collapsedSystemNotes = [
+ {
+ id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ reply_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ expanded: true,
+ notes: [
+ {
+ id: '901',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:05:36.117Z',
+ updated_at: '2018-05-29T12:05:36.117Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note:
+ 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
+ note_html:
+ '<p dir="auto">Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_901&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/901/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/901',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ reply_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ expanded: true,
+ notes: [
+ {
+ id: '902',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:05:58.694Z',
+ updated_at: '2018-05-29T12:05:58.694Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note:
+ 'Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.',
+ note_html:
+ '<p dir="auto">Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_902&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/902/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/902',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ reply_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ expanded: true,
+ notes: [
+ {
+ id: '904',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:06:16.112Z',
+ updated_at: '2018-05-29T12:06:16.112Z',
+ system: false,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'Ullamcorper eget nulla facilisi etiam',
+ note_html: '<p dir="auto">Ullamcorper eget nulla facilisi etiam</p>',
+ current_user: { can_edit: true, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ discussion_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
+ emoji_awardable: true,
+ award_emoji: [],
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_904&user_id=1',
+ human_access: 'Owner',
+ toggle_award_path: '/gitlab-org/gitlab-shell/notes/904/toggle_award_emoji',
+ path: '/gitlab-org/gitlab-shell/notes/904',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ reply_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ expanded: true,
+ notes: [
+ {
+ id: '905',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:06:28.851Z',
+ updated_at: '2018-05-29T12:06:28.851Z',
+ system: true,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ start_description_version_id: undefined,
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ system_note_icon_name: 'pencil-square',
+ discussion_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
+ emoji_awardable: false,
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_905&user_id=1',
+ human_access: 'Owner',
+ path: '/gitlab-org/gitlab-shell/notes/905',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+ {
+ id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ reply_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ expanded: true,
+ notes: [
+ {
+ id: '906',
+ type: null,
+ attachment: null,
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ path: '/root',
+ },
+ created_at: '2018-05-29T12:20:02.925Z',
+ updated_at: '2018-05-29T12:20:02.925Z',
+ system: true,
+ noteable_id: 182,
+ noteable_type: 'Issue',
+ resolvable: false,
+ noteable_iid: 12,
+ note: 'changed the description',
+ note_html: '<p dir="auto">changed the description</p>',
+ current_user: { can_edit: false, can_award_emoji: true },
+ resolved: false,
+ resolved_by: null,
+ system_note_icon_name: 'pencil-square',
+ discussion_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
+ emoji_awardable: false,
+ report_abuse_path:
+ '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_906&user_id=1',
+ human_access: 'Owner',
+ path: '/gitlab-org/gitlab-shell/notes/906',
+ },
+ ],
+ individual_note: true,
+ resolvable: false,
+ resolved: false,
+ diff_discussion: false,
+ },
+];
+
+export const discussion1 = {
+ id: 'abc1',
+ resolvable: true,
+ resolved: false,
+ active: true,
+ diff_file: {
+ file_path: 'about.md',
+ },
+ position: {
+ new_line: 50,
+ old_line: null,
+ },
+ notes: [
+ {
+ created_at: '2018-07-04T16:25:41.749Z',
+ },
+ ],
+};
+
+export const resolvedDiscussion1 = {
+ id: 'abc1',
+ resolvable: true,
+ resolved: true,
+ diff_file: {
+ file_path: 'about.md',
+ },
+ position: {
+ new_line: 50,
+ old_line: null,
+ },
+ notes: [
+ {
+ created_at: '2018-07-04T16:25:41.749Z',
+ },
+ ],
+};
+
+export const discussion2 = {
+ id: 'abc2',
+ resolvable: true,
+ resolved: false,
+ active: true,
+ diff_file: {
+ file_path: 'README.md',
+ },
+ position: {
+ new_line: null,
+ old_line: 20,
+ },
+ notes: [
+ {
+ created_at: '2018-07-04T12:05:41.749Z',
+ },
+ ],
+};
+
+export const discussion3 = {
+ id: 'abc3',
+ resolvable: true,
+ active: true,
+ resolved: false,
+ diff_file: {
+ file_path: 'README.md',
+ },
+ position: {
+ new_line: 21,
+ old_line: null,
+ },
+ notes: [
+ {
+ created_at: '2018-07-05T17:25:41.749Z',
+ },
+ ],
+};
+
+export const unresolvableDiscussion = {
+ resolvable: false,
+};
+
+export const discussionFiltersMock = [
+ {
+ title: 'Show all activity',
+ value: 0,
+ },
+ {
+ title: 'Show comments only',
+ value: 1,
+ },
+ {
+ title: 'Show system notes only',
+ value: 2,
+ },
+];
diff --git a/spec/frontend/performance_bar/components/add_request_spec.js b/spec/frontend/performance_bar/components/add_request_spec.js
new file mode 100644
index 00000000000..cef264f3915
--- /dev/null
+++ b/spec/frontend/performance_bar/components/add_request_spec.js
@@ -0,0 +1,62 @@
+import AddRequest from '~/performance_bar/components/add_request.vue';
+import { shallowMount } from '@vue/test-utils';
+
+describe('add request form', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = shallowMount(AddRequest);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('hides the input on load', () => {
+ expect(wrapper.find('input').exists()).toBe(false);
+ });
+
+ describe('when clicking the button', () => {
+ beforeEach(() => {
+ wrapper.find('button').trigger('click');
+ });
+
+ it('shows the form', () => {
+ expect(wrapper.find('input').exists()).toBe(true);
+ });
+
+ describe('when pressing escape', () => {
+ beforeEach(() => {
+ wrapper.find('input').trigger('keyup.esc');
+ });
+
+ it('hides the input', () => {
+ expect(wrapper.find('input').exists()).toBe(false);
+ });
+ });
+
+ describe('when submitting the form', () => {
+ beforeEach(() => {
+ wrapper.find('input').setValue('http://gitlab.example.com/users/root/calendar.json');
+ wrapper.find('input').trigger('keyup.enter');
+ });
+
+ it('emits an event to add the request', () => {
+ expect(wrapper.emitted()['add-request']).toBeTruthy();
+ expect(wrapper.emitted()['add-request'][0]).toEqual([
+ 'http://gitlab.example.com/users/root/calendar.json',
+ ]);
+ });
+
+ it('hides the input', () => {
+ expect(wrapper.find('input').exists()).toBe(false);
+ });
+
+ it('clears the value for next time', () => {
+ wrapper.find('button').trigger('click');
+
+ expect(wrapper.find('input').text()).toEqual('');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/graph/action_component_spec.js b/spec/frontend/pipelines/graph/action_component_spec.js
new file mode 100644
index 00000000000..38ffe98c79b
--- /dev/null
+++ b/spec/frontend/pipelines/graph/action_component_spec.js
@@ -0,0 +1,75 @@
+import { mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+import ActionComponent from '~/pipelines/components/graph/action_component.vue';
+
+describe('pipeline graph action component', () => {
+ let wrapper;
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+
+ mock.onPost('foo.json').reply(200);
+
+ wrapper = mount(ActionComponent, {
+ propsData: {
+ tooltipText: 'bar',
+ link: 'foo',
+ actionIcon: 'cancel',
+ },
+ sync: false,
+ });
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ it('should render the provided title as a bootstrap tooltip', () => {
+ expect(wrapper.attributes('data-original-title')).toBe('bar');
+ });
+
+ it('should update bootstrap tooltip when title changes', done => {
+ wrapper.setProps({ tooltipText: 'changed' });
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.attributes('data-original-title')).toBe('changed');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('should render an svg', () => {
+ expect(wrapper.find('.ci-action-icon-wrapper')).toBeDefined();
+ expect(wrapper.find('svg')).toBeDefined();
+ });
+
+ describe('on click', () => {
+ it('emits `pipelineActionRequestComplete` after a successful request', done => {
+ jest.spyOn(wrapper.vm, '$emit');
+
+ wrapper.find('button').trigger('click');
+
+ waitForPromises()
+ .then(() => {
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('pipelineActionRequestComplete');
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('renders a loading icon while waiting for request', done => {
+ wrapper.find('button').trigger('click');
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.js-action-icon-loading').exists()).toBe(true);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/pipelines/pipeline_triggerer_spec.js b/spec/frontend/pipelines/pipeline_triggerer_spec.js
index 8cf290f2663..45ac278dd38 100644
--- a/spec/javascripts/pipelines/pipeline_triggerer_spec.js
+++ b/spec/frontend/pipelines/pipeline_triggerer_spec.js
@@ -17,6 +17,7 @@ describe('Pipelines Triggerer', () => {
const createComponent = () => {
wrapper = mount(pipelineTriggerer, {
propsData: mockData,
+ sync: false,
});
};
@@ -49,6 +50,8 @@ describe('Pipelines Triggerer', () => {
},
});
- expect(wrapper.find('.js-pipeline-url-api').text()).toEqual('API');
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.js-pipeline-url-api').text()).toEqual('API');
+ });
});
});
diff --git a/spec/javascripts/pipelines/pipelines_table_row_spec.js b/spec/frontend/pipelines/pipelines_table_row_spec.js
index d47504d2f54..1c785ec6ffe 100644
--- a/spec/javascripts/pipelines/pipelines_table_row_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_row_spec.js
@@ -1,22 +1,21 @@
-import Vue from 'vue';
-import tableRowComp from '~/pipelines/components/pipelines_table_row.vue';
+import { mount } from '@vue/test-utils';
+import PipelinesTableRowComponent from '~/pipelines/components/pipelines_table_row.vue';
import eventHub from '~/pipelines/event_hub';
describe('Pipelines Table Row', () => {
const jsonFixtureName = 'pipelines/pipelines.json';
- const buildComponent = pipeline => {
- const PipelinesTableRowComponent = Vue.extend(tableRowComp);
- return new PipelinesTableRowComponent({
- el: document.querySelector('.test-dom-element'),
+
+ const createWrapper = pipeline =>
+ mount(PipelinesTableRowComponent, {
propsData: {
pipeline,
autoDevopsHelpPath: 'foo',
viewType: 'root',
},
- }).$mount();
- };
+ sync: false,
+ });
- let component;
+ let wrapper;
let pipeline;
let pipelineWithoutAuthor;
let pipelineWithoutCommit;
@@ -32,28 +31,29 @@ describe('Pipelines Table Row', () => {
});
afterEach(() => {
- component.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
it('should render a table row', () => {
- component = buildComponent(pipeline);
+ wrapper = createWrapper(pipeline);
- expect(component.$el.getAttribute('class')).toContain('gl-responsive-table-row');
+ expect(wrapper.attributes('class')).toContain('gl-responsive-table-row');
});
describe('status column', () => {
beforeEach(() => {
- component = buildComponent(pipeline);
+ wrapper = createWrapper(pipeline);
});
it('should render a pipeline link', () => {
- expect(
- component.$el.querySelector('.table-section.commit-link a').getAttribute('href'),
- ).toEqual(pipeline.path);
+ expect(wrapper.find('.table-section.commit-link a').attributes('href')).toEqual(
+ pipeline.path,
+ );
});
it('should render status text', () => {
- expect(component.$el.querySelector('.table-section.commit-link a').textContent).toContain(
+ expect(wrapper.find('.table-section.commit-link a').text()).toContain(
pipeline.details.status.text,
);
});
@@ -61,33 +61,32 @@ describe('Pipelines Table Row', () => {
describe('information column', () => {
beforeEach(() => {
- component = buildComponent(pipeline);
+ wrapper = createWrapper(pipeline);
});
it('should render a pipeline link', () => {
- expect(
- component.$el.querySelector('.table-section:nth-child(2) a').getAttribute('href'),
- ).toEqual(pipeline.path);
+ expect(wrapper.find('.table-section:nth-child(2) a').attributes('href')).toEqual(
+ pipeline.path,
+ );
});
it('should render pipeline ID', () => {
- expect(
- component.$el.querySelector('.table-section:nth-child(2) a > span').textContent,
- ).toEqual(`#${pipeline.id}`);
+ expect(wrapper.find('.table-section:nth-child(2) a > span').text()).toEqual(
+ `#${pipeline.id}`,
+ );
});
describe('when a user is provided', () => {
it('should render user information', () => {
expect(
- component.$el
- .querySelector('.table-section:nth-child(3) .js-pipeline-url-user')
- .getAttribute('href'),
+ wrapper.find('.table-section:nth-child(3) .js-pipeline-url-user').attributes('href'),
).toEqual(pipeline.user.path);
expect(
- component.$el
- .querySelector('.table-section:nth-child(3) .js-user-avatar-image-toolip')
- .textContent.trim(),
+ wrapper
+ .find('.table-section:nth-child(3) .js-user-avatar-image-toolip')
+ .text()
+ .trim(),
).toEqual(pipeline.user.name);
});
});
@@ -95,40 +94,47 @@ describe('Pipelines Table Row', () => {
describe('commit column', () => {
it('should render link to commit', () => {
- component = buildComponent(pipeline);
+ wrapper = createWrapper(pipeline);
- const commitLink = component.$el.querySelector('.branch-commit .commit-sha');
+ const commitLink = wrapper.find('.branch-commit .commit-sha');
- expect(commitLink.getAttribute('href')).toEqual(pipeline.commit.commit_path);
+ expect(commitLink.attributes('href')).toEqual(pipeline.commit.commit_path);
});
const findElements = () => {
- const commitTitleElement = component.$el.querySelector('.branch-commit .commit-title');
- const commitAuthorElement = commitTitleElement.querySelector('a.avatar-image-container');
+ const commitTitleElement = wrapper.find('.branch-commit .commit-title');
+ const commitAuthorElement = commitTitleElement.find('a.avatar-image-container');
- if (!commitAuthorElement) {
- return { commitAuthorElement };
+ if (!commitAuthorElement.exists()) {
+ return {
+ commitAuthorElement,
+ };
}
- const commitAuthorLink = commitAuthorElement.getAttribute('href');
+ const commitAuthorLink = commitAuthorElement.attributes('href');
const commitAuthorName = commitAuthorElement
- .querySelector('.js-user-avatar-image-toolip')
- .textContent.trim();
-
- return { commitAuthorElement, commitAuthorLink, commitAuthorName };
+ .find('.js-user-avatar-image-toolip')
+ .text()
+ .trim();
+
+ return {
+ commitAuthorElement,
+ commitAuthorLink,
+ commitAuthorName,
+ };
};
it('renders nothing without commit', () => {
expect(pipelineWithoutCommit.commit).toBe(null);
- component = buildComponent(pipelineWithoutCommit);
+ wrapper = createWrapper(pipelineWithoutCommit);
const { commitAuthorElement } = findElements();
- expect(commitAuthorElement).toBe(null);
+ expect(commitAuthorElement.exists()).toBe(false);
});
it('renders commit author', () => {
- component = buildComponent(pipeline);
+ wrapper = createWrapper(pipeline);
const { commitAuthorLink, commitAuthorName } = findElements();
expect(commitAuthorLink).toEqual(pipeline.commit.author.path);
@@ -137,8 +143,8 @@ describe('Pipelines Table Row', () => {
it('renders commit with unregistered author', () => {
expect(pipelineWithoutAuthor.commit.author).toBe(null);
- component = buildComponent(pipelineWithoutAuthor);
+ wrapper = createWrapper(pipelineWithoutAuthor);
const { commitAuthorLink, commitAuthorName } = findElements();
expect(commitAuthorLink).toEqual(`mailto:${pipelineWithoutAuthor.commit.author_email}`);
@@ -148,13 +154,12 @@ describe('Pipelines Table Row', () => {
describe('stages column', () => {
beforeEach(() => {
- component = buildComponent(pipeline);
+ wrapper = createWrapper(pipeline);
});
it('should render an icon for each stage', () => {
expect(
- component.$el.querySelectorAll('.table-section:nth-child(4) .js-builds-dropdown-button')
- .length,
+ wrapper.findAll('.table-section:nth-child(4) .js-builds-dropdown-button').length,
).toEqual(pipeline.details.stages.length);
});
});
@@ -172,44 +177,49 @@ describe('Pipelines Table Row', () => {
withActions.cancel_path = '/cancel';
withActions.retry_path = '/retry';
- component = buildComponent(withActions);
+ wrapper = createWrapper(withActions);
});
it('should render the provided actions', () => {
- expect(component.$el.querySelector('.js-pipelines-retry-button')).not.toBeNull();
- expect(component.$el.querySelector('.js-pipelines-cancel-button')).not.toBeNull();
- const dropdownMenu = component.$el.querySelectorAll('.dropdown-menu');
+ expect(wrapper.find('.js-pipelines-retry-button').exists()).toBe(true);
+ expect(wrapper.find('.js-pipelines-cancel-button').exists()).toBe(true);
+ const dropdownMenu = wrapper.find('.dropdown-menu');
- expect(dropdownMenu).toContainText(scheduledJobAction.name);
+ expect(dropdownMenu.text()).toContain(scheduledJobAction.name);
});
it('emits `retryPipeline` event when retry button is clicked and toggles loading', () => {
eventHub.$on('retryPipeline', endpoint => {
- expect(endpoint).toEqual('/retry');
+ expect(endpoint).toBe('/retry');
});
- component.$el.querySelector('.js-pipelines-retry-button').click();
-
- expect(component.isRetrying).toEqual(true);
+ wrapper.find('.js-pipelines-retry-button').trigger('click');
+ expect(wrapper.vm.isRetrying).toBe(true);
});
it('emits `openConfirmationModal` event when cancel button is clicked and toggles loading', () => {
eventHub.$once('openConfirmationModal', data => {
const { id, ref, commit } = pipeline;
- expect(data.endpoint).toEqual('/cancel');
- expect(data.pipeline).toEqual(jasmine.objectContaining({ id, ref, commit }));
+ expect(data.endpoint).toBe('/cancel');
+ expect(data.pipeline).toEqual(
+ expect.objectContaining({
+ id,
+ ref,
+ commit,
+ }),
+ );
});
- component.$el.querySelector('.js-pipelines-cancel-button').click();
+ wrapper.find('.js-pipelines-cancel-button').trigger('click');
});
it('renders a loading icon when `cancelingPipeline` matches pipeline id', done => {
- component.cancelingPipeline = pipeline.id;
- component
+ wrapper.setProps({ cancelingPipeline: pipeline.id });
+ wrapper.vm
.$nextTick()
.then(() => {
- expect(component.isCancelling).toEqual(true);
+ expect(wrapper.vm.isCancelling).toBe(true);
})
.then(done)
.catch(done.fail);
diff --git a/spec/frontend/pipelines/test_reports/mock_data.js b/spec/frontend/pipelines/test_reports/mock_data.js
new file mode 100644
index 00000000000..b0f22bc63fb
--- /dev/null
+++ b/spec/frontend/pipelines/test_reports/mock_data.js
@@ -0,0 +1,123 @@
+import { formatTime } from '~/lib/utils/datetime_utility';
+import { TestStatus } from '~/pipelines/constants';
+
+export const testCases = [
+ {
+ classname: 'spec.test_spec',
+ execution_time: 0.000748,
+ name: 'Test#subtract when a is 1 and b is 2 raises an error',
+ stack_trace: null,
+ status: TestStatus.SUCCESS,
+ system_output: null,
+ },
+ {
+ classname: 'spec.test_spec',
+ execution_time: 0.000064,
+ name: 'Test#subtract when a is 2 and b is 1 returns correct result',
+ stack_trace: null,
+ status: TestStatus.SUCCESS,
+ system_output: null,
+ },
+ {
+ classname: 'spec.test_spec',
+ execution_time: 0.009292,
+ name: 'Test#sum when a is 1 and b is 2 returns summary',
+ stack_trace: null,
+ status: TestStatus.FAILED,
+ system_output:
+ "Failure/Error: is_expected.to eq(3)\n\n expected: 3\n got: -1\n\n (compared using ==)\n./spec/test_spec.rb:12:in `block (4 levels) in <top (required)>'",
+ },
+ {
+ classname: 'spec.test_spec',
+ execution_time: 0.00018,
+ name: 'Test#sum when a is 100 and b is 200 returns summary',
+ stack_trace: null,
+ status: TestStatus.FAILED,
+ system_output:
+ "Failure/Error: is_expected.to eq(300)\n\n expected: 300\n got: -100\n\n (compared using ==)\n./spec/test_spec.rb:21:in `block (4 levels) in <top (required)>'",
+ },
+ {
+ classname: 'spec.test_spec',
+ execution_time: 0,
+ name: 'Test#skipped text',
+ stack_trace: null,
+ status: TestStatus.SKIPPED,
+ system_output: null,
+ },
+];
+
+export const testCasesFormatted = [
+ {
+ ...testCases[2],
+ icon: 'status_failed_borderless',
+ formattedTime: formatTime(testCases[0].execution_time * 1000),
+ },
+ {
+ ...testCases[3],
+ icon: 'status_failed_borderless',
+ formattedTime: formatTime(testCases[1].execution_time * 1000),
+ },
+ {
+ ...testCases[4],
+ icon: 'status_skipped_borderless',
+ formattedTime: formatTime(testCases[2].execution_time * 1000),
+ },
+ {
+ ...testCases[0],
+ icon: 'status_success_borderless',
+ formattedTime: formatTime(testCases[3].execution_time * 1000),
+ },
+ {
+ ...testCases[1],
+ icon: 'status_success_borderless',
+ formattedTime: formatTime(testCases[4].execution_time * 1000),
+ },
+];
+
+export const testSuites = [
+ {
+ error_count: 0,
+ failed_count: 2,
+ name: 'rspec:osx',
+ skipped_count: 0,
+ success_count: 2,
+ test_cases: testCases,
+ total_count: 4,
+ total_time: 60,
+ },
+ {
+ error_count: 0,
+ failed_count: 10,
+ name: 'rspec:osx',
+ skipped_count: 0,
+ success_count: 50,
+ test_cases: [],
+ total_count: 60,
+ total_time: 0.010284,
+ },
+];
+
+export const testSuitesFormatted = testSuites.map(x => ({
+ ...x,
+ formattedTime: formatTime(x.total_time * 1000),
+}));
+
+export const testReports = {
+ error_count: 0,
+ failed_count: 2,
+ skipped_count: 0,
+ success_count: 2,
+ test_suites: testSuites,
+ total_count: 4,
+ total_time: 0.010284,
+};
+
+export const testReportsWithNoSuites = {
+ error_count: 0,
+ failed_count: 2,
+ skipped_count: 0,
+ success_count: 2,
+ test_suites: [],
+ total_count: 4,
+ total_time: 0.010284,
+};
diff --git a/spec/frontend/pipelines/test_reports/stores/actions_spec.js b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
new file mode 100644
index 00000000000..c1721e12234
--- /dev/null
+++ b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
@@ -0,0 +1,109 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import * as actions from '~/pipelines/stores/test_reports/actions';
+import * as types from '~/pipelines/stores/test_reports/mutation_types';
+import { TEST_HOST } from '../../../helpers/test_constants';
+import testAction from '../../../helpers/vuex_action_helper';
+import createFlash from '~/flash';
+import { testReports } from '../mock_data';
+
+jest.mock('~/flash.js');
+
+describe('Actions TestReports Store', () => {
+ let mock;
+ let state;
+
+ const endpoint = `${TEST_HOST}/test_reports.json`;
+ const defaultState = {
+ endpoint,
+ testReports: {},
+ selectedSuite: {},
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ state = defaultState;
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('fetch reports', () => {
+ beforeEach(() => {
+ mock.onGet(`${TEST_HOST}/test_reports.json`).replyOnce(200, testReports, {});
+ });
+
+ it('sets testReports and shows tests', done => {
+ testAction(
+ actions.fetchReports,
+ null,
+ state,
+ [{ type: types.SET_REPORTS, payload: testReports }],
+ [{ type: 'toggleLoading' }, { type: 'toggleLoading' }],
+ done,
+ );
+ });
+
+ it('should create flash on API error', done => {
+ testAction(
+ actions.fetchReports,
+ null,
+ {
+ endpoint: null,
+ },
+ [],
+ [{ type: 'toggleLoading' }, { type: 'toggleLoading' }],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('set selected suite', () => {
+ const selectedSuite = testReports.test_suites[0];
+
+ it('sets selectedSuite', done => {
+ testAction(
+ actions.setSelectedSuite,
+ selectedSuite,
+ state,
+ [{ type: types.SET_SELECTED_SUITE, payload: selectedSuite }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('remove selected suite', () => {
+ it('sets selectedSuite to {}', done => {
+ testAction(
+ actions.removeSelectedSuite,
+ {},
+ state,
+ [{ type: types.SET_SELECTED_SUITE, payload: {} }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('toggles loading', () => {
+ it('sets isLoading to true', done => {
+ testAction(actions.toggleLoading, {}, state, [{ type: types.TOGGLE_LOADING }], [], done);
+ });
+
+ it('toggles isLoading to false', done => {
+ testAction(
+ actions.toggleLoading,
+ {},
+ { ...state, isLoading: true },
+ [{ type: types.TOGGLE_LOADING }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/test_reports/stores/getters_spec.js b/spec/frontend/pipelines/test_reports/stores/getters_spec.js
new file mode 100644
index 00000000000..e630a005409
--- /dev/null
+++ b/spec/frontend/pipelines/test_reports/stores/getters_spec.js
@@ -0,0 +1,54 @@
+import * as getters from '~/pipelines/stores/test_reports/getters';
+import { testReports, testSuitesFormatted, testCasesFormatted } from '../mock_data';
+
+describe('Getters TestReports Store', () => {
+ let state;
+
+ const defaultState = {
+ testReports,
+ selectedSuite: testReports.test_suites[0],
+ };
+
+ const emptyState = {
+ testReports: {},
+ selectedSuite: {},
+ };
+
+ beforeEach(() => {
+ state = {
+ testReports,
+ };
+ });
+
+ const setupState = (testState = defaultState) => {
+ state = testState;
+ };
+
+ describe('getTestSuites', () => {
+ it('should return the test suites', () => {
+ setupState();
+
+ expect(getters.getTestSuites(state)).toEqual(testSuitesFormatted);
+ });
+
+ it('should return an empty array when testReports is empty', () => {
+ setupState(emptyState);
+
+ expect(getters.getTestSuites(state)).toEqual([]);
+ });
+ });
+
+ describe('getSuiteTests', () => {
+ it('should return the test cases inside the suite', () => {
+ setupState();
+
+ expect(getters.getSuiteTests(state)).toEqual(testCasesFormatted);
+ });
+
+ it('should return an empty array when testReports is empty', () => {
+ setupState(emptyState);
+
+ expect(getters.getSuiteTests(state)).toEqual([]);
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js b/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
new file mode 100644
index 00000000000..ad5b7f91163
--- /dev/null
+++ b/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
@@ -0,0 +1,63 @@
+import * as types from '~/pipelines/stores/test_reports/mutation_types';
+import mutations from '~/pipelines/stores/test_reports/mutations';
+import { testReports, testSuites } from '../mock_data';
+
+describe('Mutations TestReports Store', () => {
+ let mockState;
+
+ const defaultState = {
+ endpoint: '',
+ testReports: {},
+ selectedSuite: {},
+ isLoading: false,
+ };
+
+ beforeEach(() => {
+ mockState = defaultState;
+ });
+
+ describe('set endpoint', () => {
+ it('should set endpoint', () => {
+ const expectedState = Object.assign({}, mockState, { endpoint: 'foo' });
+ mutations[types.SET_ENDPOINT](mockState, 'foo');
+
+ expect(mockState.endpoint).toEqual(expectedState.endpoint);
+ });
+ });
+
+ describe('set reports', () => {
+ it('should set testReports', () => {
+ const expectedState = Object.assign({}, mockState, { testReports });
+ mutations[types.SET_REPORTS](mockState, testReports);
+
+ expect(mockState.testReports).toEqual(expectedState.testReports);
+ });
+ });
+
+ describe('set selected suite', () => {
+ it('should set selectedSuite', () => {
+ const expectedState = Object.assign({}, mockState, { selectedSuite: testSuites[0] });
+ mutations[types.SET_SELECTED_SUITE](mockState, testSuites[0]);
+
+ expect(mockState.selectedSuite).toEqual(expectedState.selectedSuite);
+ });
+ });
+
+ describe('toggle loading', () => {
+ it('should set to true', () => {
+ const expectedState = Object.assign({}, mockState, { isLoading: true });
+ mutations[types.TOGGLE_LOADING](mockState);
+
+ expect(mockState.isLoading).toEqual(expectedState.isLoading);
+ });
+
+ it('should toggle back to false', () => {
+ const expectedState = Object.assign({}, mockState, { isLoading: false });
+ mockState.isLoading = true;
+
+ mutations[types.TOGGLE_LOADING](mockState);
+
+ expect(mockState.isLoading).toEqual(expectedState.isLoading);
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/test_reports/test_reports_spec.js b/spec/frontend/pipelines/test_reports/test_reports_spec.js
new file mode 100644
index 00000000000..4d6422745a9
--- /dev/null
+++ b/spec/frontend/pipelines/test_reports/test_reports_spec.js
@@ -0,0 +1,64 @@
+import Vuex from 'vuex';
+import TestReports from '~/pipelines/components/test_reports/test_reports.vue';
+import { shallowMount } from '@vue/test-utils';
+import { testReports } from './mock_data';
+import * as actions from '~/pipelines/stores/test_reports/actions';
+
+describe('Test reports app', () => {
+ let wrapper;
+ let store;
+
+ const loadingSpinner = () => wrapper.find('.js-loading-spinner');
+ const testsDetail = () => wrapper.find('.js-tests-detail');
+ const noTestsToShow = () => wrapper.find('.js-no-tests-to-show');
+
+ const createComponent = (state = {}) => {
+ store = new Vuex.Store({
+ state: {
+ isLoading: false,
+ selectedSuite: {},
+ testReports,
+ ...state,
+ },
+ actions,
+ });
+
+ wrapper = shallowMount(TestReports, {
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when loading', () => {
+ beforeEach(() => createComponent({ isLoading: true }));
+
+ it('shows the loading spinner', () => {
+ expect(noTestsToShow().exists()).toBe(false);
+ expect(testsDetail().exists()).toBe(false);
+ expect(loadingSpinner().exists()).toBe(true);
+ });
+ });
+
+ describe('when the api returns no data', () => {
+ beforeEach(() => createComponent({ testReports: {} }));
+
+ it('displays that there are no tests to show', () => {
+ const noTests = noTestsToShow();
+
+ expect(noTests.exists()).toBe(true);
+ expect(noTests.text()).toBe('There are no tests to show.');
+ });
+ });
+
+ describe('when the api returns data', () => {
+ beforeEach(() => createComponent());
+
+ it('sets testReports and shows tests', () => {
+ expect(wrapper.vm.testReports).toBeTruthy();
+ expect(wrapper.vm.showTests).toBeTruthy();
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
new file mode 100644
index 00000000000..b4305719ea8
--- /dev/null
+++ b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
@@ -0,0 +1,77 @@
+import Vuex from 'vuex';
+import SuiteTable from '~/pipelines/components/test_reports/test_suite_table.vue';
+import * as getters from '~/pipelines/stores/test_reports/getters';
+import { TestStatus } from '~/pipelines/constants';
+import { shallowMount } from '@vue/test-utils';
+import { testSuites, testCases } from './mock_data';
+
+describe('Test reports suite table', () => {
+ let wrapper;
+ let store;
+
+ const noCasesMessage = () => wrapper.find('.js-no-test-cases');
+ const allCaseRows = () => wrapper.findAll('.js-case-row');
+ const findCaseRowAtIndex = index => wrapper.findAll('.js-case-row').at(index);
+ const findIconForRow = (row, status) => row.find(`.ci-status-icon-${status}`);
+
+ const createComponent = (suite = testSuites[0]) => {
+ store = new Vuex.Store({
+ state: {
+ selectedSuite: suite,
+ },
+ getters,
+ });
+
+ wrapper = shallowMount(SuiteTable, {
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('should not render', () => {
+ beforeEach(() => createComponent([]));
+
+ it('a table when there are no test cases', () => {
+ expect(noCasesMessage().exists()).toBe(true);
+ });
+ });
+
+ describe('when a test suite is supplied', () => {
+ beforeEach(() => createComponent());
+
+ it('renders the correct number of rows', () => {
+ expect(allCaseRows().length).toBe(testCases.length);
+ });
+
+ it('renders the failed tests first', () => {
+ const failedCaseNames = testCases
+ .filter(x => x.status === TestStatus.FAILED)
+ .map(x => x.name);
+
+ const skippedCaseNames = testCases
+ .filter(x => x.status === TestStatus.SKIPPED)
+ .map(x => x.name);
+
+ expect(findCaseRowAtIndex(0).text()).toContain(failedCaseNames[0]);
+ expect(findCaseRowAtIndex(1).text()).toContain(failedCaseNames[1]);
+ expect(findCaseRowAtIndex(2).text()).toContain(skippedCaseNames[0]);
+ });
+
+ it('renders the correct icon for each status', () => {
+ const failedTest = testCases.findIndex(x => x.status === TestStatus.FAILED);
+ const skippedTest = testCases.findIndex(x => x.status === TestStatus.SKIPPED);
+ const successTest = testCases.findIndex(x => x.status === TestStatus.SUCCESS);
+
+ const failedRow = findCaseRowAtIndex(failedTest);
+ const skippedRow = findCaseRowAtIndex(skippedTest);
+ const successRow = findCaseRowAtIndex(successTest);
+
+ expect(findIconForRow(failedRow, TestStatus.FAILED).exists()).toBe(true);
+ expect(findIconForRow(skippedRow, TestStatus.SKIPPED).exists()).toBe(true);
+ expect(findIconForRow(successRow, TestStatus.SUCCESS).exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/test_reports/test_summary_spec.js b/spec/frontend/pipelines/test_reports/test_summary_spec.js
new file mode 100644
index 00000000000..19a7755dbdc
--- /dev/null
+++ b/spec/frontend/pipelines/test_reports/test_summary_spec.js
@@ -0,0 +1,78 @@
+import Summary from '~/pipelines/components/test_reports/test_summary.vue';
+import { mount } from '@vue/test-utils';
+import { testSuites } from './mock_data';
+
+describe('Test reports summary', () => {
+ let wrapper;
+
+ const backButton = () => wrapper.find('.js-back-button');
+ const totalTests = () => wrapper.find('.js-total-tests');
+ const failedTests = () => wrapper.find('.js-failed-tests');
+ const erroredTests = () => wrapper.find('.js-errored-tests');
+ const successRate = () => wrapper.find('.js-success-rate');
+ const duration = () => wrapper.find('.js-duration');
+
+ const defaultProps = {
+ report: testSuites[0],
+ showBack: false,
+ };
+
+ const createComponent = props => {
+ wrapper = mount(Summary, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ describe('should not render', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('a back button by default', () => {
+ expect(backButton().exists()).toBe(false);
+ });
+ });
+
+ describe('should render', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('a back button and emit on-back-click event', () => {
+ createComponent({
+ showBack: true,
+ });
+
+ expect(backButton().exists()).toBe(true);
+ });
+ });
+
+ describe('when a report is supplied', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays the correct total', () => {
+ expect(totalTests().text()).toBe('4 jobs');
+ });
+
+ it('displays the correct failure count', () => {
+ expect(failedTests().text()).toBe('2 failures');
+ });
+
+ it('displays the correct error count', () => {
+ expect(erroredTests().text()).toBe('0 errors');
+ });
+
+ it('calculates and displays percentages correctly', () => {
+ expect(successRate().text()).toBe('50% success rate');
+ });
+
+ it('displays the correctly formatted duration', () => {
+ expect(duration().text()).toBe('00:01:00');
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js b/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
new file mode 100644
index 00000000000..e7599d5cdbc
--- /dev/null
+++ b/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
@@ -0,0 +1,54 @@
+import Vuex from 'vuex';
+import SummaryTable from '~/pipelines/components/test_reports/test_summary_table.vue';
+import * as getters from '~/pipelines/stores/test_reports/getters';
+import { mount, createLocalVue } from '@vue/test-utils';
+import { testReports, testReportsWithNoSuites } from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Test reports summary table', () => {
+ let wrapper;
+ let store;
+
+ const allSuitesRows = () => wrapper.findAll('.js-suite-row');
+ const noSuitesToShow = () => wrapper.find('.js-no-tests-suites');
+
+ const defaultProps = {
+ testReports,
+ };
+
+ const createComponent = (reports = null) => {
+ store = new Vuex.Store({
+ state: {
+ testReports: reports || testReports,
+ },
+ getters,
+ });
+
+ wrapper = mount(SummaryTable, {
+ propsData: defaultProps,
+ store,
+ localVue,
+ });
+ };
+
+ describe('when test reports are supplied', () => {
+ beforeEach(() => createComponent());
+
+ it('renders the correct number of rows', () => {
+ expect(noSuitesToShow().exists()).toBe(false);
+ expect(allSuitesRows().length).toBe(testReports.test_suites.length);
+ });
+ });
+
+ describe('when there are no test suites', () => {
+ beforeEach(() => {
+ createComponent({ testReportsWithNoSuites });
+ });
+
+ it('displays the no suites to show message', () => {
+ expect(noSuitesToShow().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/project_find_file_spec.js b/spec/frontend/project_find_file_spec.js
index 8102033139f..e60f9f62747 100644
--- a/spec/frontend/project_find_file_spec.js
+++ b/spec/frontend/project_find_file_spec.js
@@ -3,6 +3,9 @@ import $ from 'jquery';
import ProjectFindFile from '~/project_find_file';
import axios from '~/lib/utils/axios_utils';
import { TEST_HOST } from 'helpers/test_constants';
+import sanitize from 'sanitize-html';
+
+jest.mock('sanitize-html', () => jest.fn(val => val));
const BLOB_URL_TEMPLATE = `${TEST_HOST}/namespace/project/blob/master`;
const FILE_FIND_URL = `${TEST_HOST}/namespace/project/files/master?format=json`;
@@ -38,31 +41,31 @@ describe('ProjectFindFile', () => {
href: el.querySelector('a').href,
}));
+ const files = [
+ 'fileA.txt',
+ 'fileB.txt',
+ 'fi#leC.txt',
+ 'folderA/fileD.txt',
+ 'folder#B/fileE.txt',
+ 'folde?rC/fil#F.txt',
+ ];
+
beforeEach(() => {
// Create a mock adapter for stubbing axios API requests
mock = new MockAdapter(axios);
element = $(TEMPLATE);
+ mock.onGet(FILE_FIND_URL).replyOnce(200, files);
+ getProjectFindFileInstance(); // This triggers a load / axios call + subsequent render in the constructor
});
afterEach(() => {
// Reset the mock adapter
mock.restore();
+ sanitize.mockClear();
});
it('loads and renders elements from remote server', done => {
- const files = [
- 'fileA.txt',
- 'fileB.txt',
- 'fi#leC.txt',
- 'folderA/fileD.txt',
- 'folder#B/fileE.txt',
- 'folde?rC/fil#F.txt',
- ];
- mock.onGet(FILE_FIND_URL).replyOnce(200, files);
-
- getProjectFindFileInstance(); // This triggers a load / axios call + subsequent render in the constructor
-
setImmediate(() => {
expect(findFiles()).toEqual(
files.map(text => ({
@@ -74,4 +77,14 @@ describe('ProjectFindFile', () => {
done();
});
});
+
+ it('sanitizes search text', done => {
+ const searchText = element.find('.file-finder-input').val();
+
+ setImmediate(() => {
+ expect(sanitize).toHaveBeenCalledTimes(1);
+ expect(sanitize).toHaveBeenCalledWith(searchText);
+ done();
+ });
+ });
});
diff --git a/spec/frontend/registry/components/collapsible_container_spec.js b/spec/frontend/registry/components/collapsible_container_spec.js
index f93ebab1a4d..d035055afd3 100644
--- a/spec/frontend/registry/components/collapsible_container_spec.js
+++ b/spec/frontend/registry/components/collapsible_container_spec.js
@@ -1,10 +1,11 @@
import Vue from 'vue';
import Vuex from 'vuex';
import { mount, createLocalVue } from '@vue/test-utils';
-import collapsibleComponent from '~/registry/components/collapsible_container.vue';
-import { repoPropsData } from '../mock_data';
import createFlash from '~/flash';
+import Tracking from '~/tracking';
+import collapsibleComponent from '~/registry/components/collapsible_container.vue';
import * as getters from '~/registry/stores/getters';
+import { repoPropsData } from '../mock_data';
jest.mock('~/flash.js');
@@ -16,9 +17,10 @@ describe('collapsible registry container', () => {
let wrapper;
let store;
- const findDeleteBtn = w => w.find('.js-remove-repo');
- const findContainerImageTags = w => w.find('.container-image-tags');
- const findToggleRepos = w => w.findAll('.js-toggle-repo');
+ const findDeleteBtn = (w = wrapper) => w.find('.js-remove-repo');
+ const findContainerImageTags = (w = wrapper) => w.find('.container-image-tags');
+ const findToggleRepos = (w = wrapper) => w.findAll('.js-toggle-repo');
+ const findDeleteModal = (w = wrapper) => w.find({ ref: 'deleteModal' });
const mountWithStore = config => mount(collapsibleComponent, { ...config, store, localVue });
@@ -124,4 +126,45 @@ describe('collapsible registry container', () => {
expect(deleteBtn.exists()).toBe(false);
});
});
+
+ describe('tracking', () => {
+ const category = 'mock_page';
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+ wrapper.vm.deleteItem = jest.fn().mockResolvedValue();
+ wrapper.vm.fetchRepos = jest.fn();
+ wrapper.setData({
+ tracking: {
+ ...wrapper.vm.tracking,
+ category,
+ },
+ });
+ });
+
+ it('send an event when delete button is clicked', () => {
+ const deleteBtn = findDeleteBtn();
+ deleteBtn.trigger('click');
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'click_button', {
+ label: 'registry_repository_delete',
+ category,
+ });
+ });
+ it('send an event when cancel is pressed on modal', () => {
+ const deleteModal = findDeleteModal();
+ deleteModal.vm.$emit('cancel');
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'cancel_delete', {
+ label: 'registry_repository_delete',
+ category,
+ });
+ });
+ it('send an event when confirm is clicked on modal', () => {
+ const deleteModal = findDeleteModal();
+ deleteModal.vm.$emit('ok');
+
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'confirm_delete', {
+ label: 'registry_repository_delete',
+ category,
+ });
+ });
+ });
});
diff --git a/spec/frontend/registry/components/table_registry_spec.js b/spec/frontend/registry/components/table_registry_spec.js
index 7cb7c012d9d..ab88caf44e1 100644
--- a/spec/frontend/registry/components/table_registry_spec.js
+++ b/spec/frontend/registry/components/table_registry_spec.js
@@ -1,10 +1,14 @@
import Vue from 'vue';
import Vuex from 'vuex';
-import tableRegistry from '~/registry/components/table_registry.vue';
import { mount, createLocalVue } from '@vue/test-utils';
+import createFlash from '~/flash';
+import Tracking from '~/tracking';
+import tableRegistry from '~/registry/components/table_registry.vue';
import { repoPropsData } from '../mock_data';
import * as getters from '~/registry/stores/getters';
+jest.mock('~/flash');
+
const [firstImage, secondImage] = repoPropsData.list;
const localVue = createLocalVue();
@@ -15,11 +19,12 @@ describe('table registry', () => {
let wrapper;
let store;
- const findSelectAllCheckbox = w => w.find('.js-select-all-checkbox > input');
- const findSelectCheckboxes = w => w.findAll('.js-select-checkbox > input');
- const findDeleteButton = w => w.find('.js-delete-registry');
- const findDeleteButtonsRow = w => w.findAll('.js-delete-registry-row');
- const findPagination = w => w.find('.js-registry-pagination');
+ const findSelectAllCheckbox = (w = wrapper) => w.find('.js-select-all-checkbox > input');
+ const findSelectCheckboxes = (w = wrapper) => w.findAll('.js-select-checkbox > input');
+ const findDeleteButton = (w = wrapper) => w.find({ ref: 'bulkDeleteButton' });
+ const findDeleteButtonsRow = (w = wrapper) => w.findAll('.js-delete-registry-row');
+ const findPagination = (w = wrapper) => w.find('.js-registry-pagination');
+ const findDeleteModal = (w = wrapper) => w.find({ ref: 'deleteModal' });
const bulkDeletePath = 'path';
const mountWithStore = config => mount(tableRegistry, { ...config, store, localVue });
@@ -139,7 +144,7 @@ describe('table registry', () => {
},
});
wrapper.vm.handleMultipleDelete();
- expect(wrapper.vm.showError).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalled();
});
});
@@ -169,6 +174,27 @@ describe('table registry', () => {
});
});
+ describe('modal event handlers', () => {
+ beforeEach(() => {
+ wrapper.vm.handleSingleDelete = jest.fn();
+ wrapper.vm.handleMultipleDelete = jest.fn();
+ });
+ it('on ok when one item is selected should call singleDelete', () => {
+ wrapper.setData({ itemsToBeDeleted: [0] });
+ wrapper.vm.onDeletionConfirmed();
+
+ expect(wrapper.vm.handleSingleDelete).toHaveBeenCalledWith(repoPropsData.list[0]);
+ expect(wrapper.vm.handleMultipleDelete).not.toHaveBeenCalled();
+ });
+ it('on ok when multiple items are selected should call muultiDelete', () => {
+ wrapper.setData({ itemsToBeDeleted: [0, 1, 2] });
+ wrapper.vm.onDeletionConfirmed();
+
+ expect(wrapper.vm.handleMultipleDelete).toHaveBeenCalled();
+ expect(wrapper.vm.handleSingleDelete).not.toHaveBeenCalled();
+ });
+ });
+
describe('pagination', () => {
const repo = {
repoPropsData,
@@ -265,4 +291,83 @@ describe('table registry', () => {
expect(deleteBtns.length).toBe(0);
});
});
+
+ describe('event tracking', () => {
+ const mockPageName = 'mock_page';
+
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+ wrapper.vm.handleSingleDelete = jest.fn();
+ wrapper.vm.handleMultipleDelete = jest.fn();
+ document.body.dataset.page = mockPageName;
+ });
+
+ afterEach(() => {
+ document.body.dataset.page = null;
+ });
+
+ describe('single tag delete', () => {
+ beforeEach(() => {
+ wrapper.setData({ itemsToBeDeleted: [0] });
+ });
+
+ it('send an event when delete button is clicked', () => {
+ const deleteBtn = findDeleteButtonsRow();
+ deleteBtn.at(0).trigger('click');
+ expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'click_button', {
+ label: 'registry_tag_delete',
+ property: 'foo',
+ });
+ });
+ it('send an event when cancel is pressed on modal', () => {
+ const deleteModal = findDeleteModal();
+ deleteModal.vm.$emit('cancel');
+ expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'cancel_delete', {
+ label: 'registry_tag_delete',
+ property: 'foo',
+ });
+ });
+ it('send an event when confirm is clicked on modal', () => {
+ const deleteModal = findDeleteModal();
+ deleteModal.vm.$emit('ok');
+
+ expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'confirm_delete', {
+ label: 'registry_tag_delete',
+ property: 'foo',
+ });
+ });
+ });
+ describe('bulk tag delete', () => {
+ beforeEach(() => {
+ const items = [0, 1, 2];
+ wrapper.setData({ itemsToBeDeleted: items, selectedItems: items });
+ });
+
+ it('send an event when delete button is clicked', () => {
+ const deleteBtn = findDeleteButton();
+ deleteBtn.vm.$emit('click');
+ expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'click_button', {
+ label: 'bulk_registry_tag_delete',
+ property: 'foo',
+ });
+ });
+ it('send an event when cancel is pressed on modal', () => {
+ const deleteModal = findDeleteModal();
+ deleteModal.vm.$emit('cancel');
+ expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'cancel_delete', {
+ label: 'bulk_registry_tag_delete',
+ property: 'foo',
+ });
+ });
+ it('send an event when confirm is clicked on modal', () => {
+ const deleteModal = findDeleteModal();
+ deleteModal.vm.$emit('ok');
+
+ expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'confirm_delete', {
+ label: 'bulk_registry_tag_delete',
+ property: 'foo',
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/releases/detail/components/app_spec.js b/spec/frontend/releases/detail/components/app_spec.js
index f8eb33a69a8..4726f18c8fa 100644
--- a/spec/frontend/releases/detail/components/app_spec.js
+++ b/spec/frontend/releases/detail/components/app_spec.js
@@ -8,15 +8,17 @@ describe('Release detail component', () => {
let wrapper;
let releaseClone;
let actions;
+ let state;
beforeEach(() => {
gon.api_version = 'v4';
releaseClone = JSON.parse(JSON.stringify(convertObjectPropsToCamelCase(release)));
- const state = {
+ state = {
release: releaseClone,
markdownDocsPath: 'path/to/markdown/docs',
+ updateReleaseApiDocsPath: 'path/to/update/release/api/docs',
};
actions = {
@@ -46,6 +48,21 @@ describe('Release detail component', () => {
expect(wrapper.find('#git-ref').element.value).toBe(releaseClone.tagName);
});
+ it('renders the correct help text under the "Tag name" field', () => {
+ const helperText = wrapper.find('#tag-name-help');
+ const helperTextLink = helperText.find('a');
+ const helperTextLinkAttrs = helperTextLink.attributes();
+
+ expect(helperText.text()).toBe(
+ 'Changing a Release tag is only supported via Releases API. More information',
+ );
+ expect(helperTextLink.text()).toBe('More information');
+ expect(helperTextLinkAttrs.href).toBe(state.updateReleaseApiDocsPath);
+ expect(helperTextLinkAttrs.rel).toContain('noopener');
+ expect(helperTextLinkAttrs.rel).toContain('noreferrer');
+ expect(helperTextLinkAttrs.target).toBe('_blank');
+ });
+
it('renders the correct release title in the "Release title" field', () => {
expect(wrapper.find('#release-title').element.value).toBe(releaseClone.name);
});
diff --git a/spec/frontend/releases/list/components/__snapshots__/release_block_spec.js.snap b/spec/frontend/releases/list/components/__snapshots__/release_block_spec.js.snap
deleted file mode 100644
index 8f2c0427c83..00000000000
--- a/spec/frontend/releases/list/components/__snapshots__/release_block_spec.js.snap
+++ /dev/null
@@ -1,332 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Release block with default props matches the snapshot 1`] = `
-<div
- class="card release-block"
- id="v0.3"
->
- <div
- class="card-body"
- >
- <div
- class="d-flex align-items-start"
- >
- <h2
- class="card-title mt-0 mr-auto"
- >
-
- New release
-
- <!---->
- </h2>
-
- <a
- class="btn btn-default js-edit-button ml-2"
- data-original-title="Edit this release"
- href="http://0.0.0.0:3001/root/release-test/-/releases/v0.3/edit"
- title=""
- >
- <svg
- aria-hidden="true"
- class="s16 ic-pencil"
- >
- <use
- xlink:href="#pencil"
- />
- </svg>
- </a>
- </div>
-
- <div
- class="card-subtitle d-flex flex-wrap text-secondary"
- >
- <div
- class="append-right-8"
- >
- <svg
- aria-hidden="true"
- class="align-middle s16 ic-commit"
- >
- <use
- xlink:href="#commit"
- />
- </svg>
-
- <span
- data-original-title="Initial commit"
- title=""
- >
- c22b0728
- </span>
- </div>
-
- <div
- class="append-right-8"
- >
- <svg
- aria-hidden="true"
- class="align-middle s16 ic-tag"
- >
- <use
- xlink:href="#tag"
- />
- </svg>
-
- <span
- data-original-title="Tag"
- title=""
- >
- v0.3
- </span>
- </div>
-
- <div
- class="js-milestone-list-label"
- >
- <svg
- aria-hidden="true"
- class="align-middle s16 ic-flag"
- >
- <use
- xlink:href="#flag"
- />
- </svg>
-
- <span
- class="js-label-text"
- >
- Milestones
- </span>
- </div>
-
- <a
- class="append-right-4 prepend-left-4 js-milestone-link"
- data-original-title="The 13.6 milestone!"
- href="http://0.0.0.0:3001/root/release-test/-/milestones/2"
- title=""
- >
-
- 13.6
-
- </a>
-
- •
-
- <a
- class="append-right-4 prepend-left-4 js-milestone-link"
- data-original-title="The 13.5 milestone!"
- href="http://0.0.0.0:3001/root/release-test/-/milestones/1"
- title=""
- >
-
- 13.5
-
- </a>
-
- <!---->
-
- <div
- class="append-right-4"
- >
-
- •
-
- <span
- data-original-title="Aug 26, 2019 5:54pm GMT+0000"
- title=""
- >
-
- released 1 month ago
-
- </span>
- </div>
-
- <div
- class="d-flex"
- >
-
- by
-
- <a
- class="user-avatar-link prepend-left-4"
- href=""
- >
- <span>
- <img
- alt="root's avatar"
- class="avatar s20 "
- data-original-title=""
- data-src="https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon"
- height="20"
- src="https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon"
- title=""
- width="20"
- />
-
- <div
- aria-hidden="true"
- class="js-user-avatar-image-toolip d-none"
- style="display: none;"
- >
- <div>
- root
- </div>
- </div>
- </span>
- <!---->
- </a>
- </div>
- </div>
-
- <div
- class="card-text prepend-top-default"
- >
- <b>
-
- Assets
-
- <span
- class="js-assets-count badge badge-pill"
- >
- 5
- </span>
- </b>
-
- <ul
- class="pl-0 mb-0 prepend-top-8 list-unstyled js-assets-list"
- >
- <li
- class="append-bottom-8"
- >
- <a
- class=""
- data-original-title="Download asset"
- href="https://google.com"
- title=""
- >
- <svg
- aria-hidden="true"
- class="align-middle append-right-4 align-text-bottom s16 ic-package"
- >
- <use
- xlink:href="#package"
- />
- </svg>
-
- my link
-
- <span>
- (external source)
- </span>
- </a>
- </li>
- <li
- class="append-bottom-8"
- >
- <a
- class=""
- data-original-title="Download asset"
- href="https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/artifacts/v11.6.0-rc4/download?job=rspec-mysql+41%2F50"
- title=""
- >
- <svg
- aria-hidden="true"
- class="align-middle append-right-4 align-text-bottom s16 ic-package"
- >
- <use
- xlink:href="#package"
- />
- </svg>
-
- my second link
-
- <!---->
- </a>
- </li>
- </ul>
-
- <div
- class="dropdown"
- >
- <button
- aria-expanded="false"
- aria-haspopup="true"
- class="btn btn-link"
- data-toggle="dropdown"
- type="button"
- >
- <svg
- aria-hidden="true"
- class="align-top append-right-4 s16 ic-doc-code"
- >
- <use
- xlink:href="#doc-code"
- />
- </svg>
-
- Source code
-
- <svg
- aria-hidden="true"
- class="s16 ic-arrow-down"
- >
- <use
- xlink:href="#arrow-down"
- />
- </svg>
- </button>
-
- <div
- class="js-sources-dropdown dropdown-menu"
- >
- <li>
- <a
- class=""
- href="http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.zip"
- >
- Download zip
- </a>
- </li>
- <li>
- <a
- class=""
- href="http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.tar.gz"
- >
- Download tar.gz
- </a>
- </li>
- <li>
- <a
- class=""
- href="http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.tar.bz2"
- >
- Download tar.bz2
- </a>
- </li>
- <li>
- <a
- class=""
- href="http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.tar"
- >
- Download tar
- </a>
- </li>
- </div>
- </div>
- </div>
-
- <div
- class="card-text prepend-top-default"
- >
- <div>
- <p
- data-sourcepos="1:1-1:21"
- dir="auto"
- >
- A super nice release!
- </p>
- </div>
- </div>
- </div>
-</div>
-`;
diff --git a/spec/frontend/releases/list/components/release_block_footer_spec.js b/spec/frontend/releases/list/components/release_block_footer_spec.js
new file mode 100644
index 00000000000..172147f1cc8
--- /dev/null
+++ b/spec/frontend/releases/list/components/release_block_footer_spec.js
@@ -0,0 +1,163 @@
+import { mount } from '@vue/test-utils';
+import ReleaseBlockFooter from '~/releases/list/components/release_block_footer.vue';
+import Icon from '~/vue_shared/components/icon.vue';
+import { GlLink } from '@gitlab/ui';
+import { trimText } from 'helpers/text_helper';
+import { release } from '../../mock_data';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+
+jest.mock('~/vue_shared/mixins/timeago', () => ({
+ methods: {
+ timeFormated() {
+ return '7 fortnightes ago';
+ },
+ tooltipTitle() {
+ return 'February 30, 2401';
+ },
+ },
+}));
+
+describe('Release block footer', () => {
+ let wrapper;
+ let releaseClone;
+
+ const factory = (props = {}) => {
+ wrapper = mount(ReleaseBlockFooter, {
+ propsData: {
+ ...convertObjectPropsToCamelCase(releaseClone),
+ ...props,
+ },
+ sync: false,
+ });
+
+ return wrapper.vm.$nextTick();
+ };
+
+ beforeEach(() => {
+ releaseClone = JSON.parse(JSON.stringify(release));
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const commitInfoSection = () => wrapper.find('.js-commit-info');
+ const commitInfoSectionLink = () => commitInfoSection().find(GlLink);
+ const tagInfoSection = () => wrapper.find('.js-tag-info');
+ const tagInfoSectionLink = () => tagInfoSection().find(GlLink);
+ const authorDateInfoSection = () => wrapper.find('.js-author-date-info');
+
+ describe('with all props provided', () => {
+ beforeEach(() => factory());
+
+ it('renders the commit icon', () => {
+ const commitIcon = commitInfoSection().find(Icon);
+
+ expect(commitIcon.exists()).toBe(true);
+ expect(commitIcon.props('name')).toBe('commit');
+ });
+
+ it('renders the commit SHA with a link', () => {
+ const commitLink = commitInfoSectionLink();
+
+ expect(commitLink.exists()).toBe(true);
+ expect(commitLink.text()).toBe(releaseClone.commit.short_id);
+ expect(commitLink.attributes('href')).toBe(releaseClone.commit_path);
+ });
+
+ it('renders the tag icon', () => {
+ const commitIcon = tagInfoSection().find(Icon);
+
+ expect(commitIcon.exists()).toBe(true);
+ expect(commitIcon.props('name')).toBe('tag');
+ });
+
+ it('renders the tag name with a link', () => {
+ const commitLink = tagInfoSection().find(GlLink);
+
+ expect(commitLink.exists()).toBe(true);
+ expect(commitLink.text()).toBe(releaseClone.tag_name);
+ expect(commitLink.attributes('href')).toBe(releaseClone.tag_path);
+ });
+
+ it('renders the author and creation time info', () => {
+ expect(trimText(authorDateInfoSection().text())).toBe(
+ `Created 7 fortnightes ago by ${releaseClone.author.username}`,
+ );
+ });
+
+ it("renders the author's avatar image", () => {
+ const avatarImg = authorDateInfoSection().find('img');
+
+ expect(avatarImg.exists()).toBe(true);
+ expect(avatarImg.attributes('src')).toBe(releaseClone.author.avatar_url);
+ });
+
+ it("renders a link to the author's profile", () => {
+ const authorLink = authorDateInfoSection().find(GlLink);
+
+ expect(authorLink.exists()).toBe(true);
+ expect(authorLink.attributes('href')).toBe(releaseClone.author.web_url);
+ });
+ });
+
+ describe('without any commit info', () => {
+ beforeEach(() => factory({ commit: undefined }));
+
+ it('does not render any commit info', () => {
+ expect(commitInfoSection().exists()).toBe(false);
+ });
+ });
+
+ describe('without a commit URL', () => {
+ beforeEach(() => factory({ commitPath: undefined }));
+
+ it('renders the commit SHA as plain text (instead of a link)', () => {
+ expect(commitInfoSectionLink().exists()).toBe(false);
+ expect(commitInfoSection().text()).toBe(releaseClone.commit.short_id);
+ });
+ });
+
+ describe('without a tag name', () => {
+ beforeEach(() => factory({ tagName: undefined }));
+
+ it('does not render any tag info', () => {
+ expect(tagInfoSection().exists()).toBe(false);
+ });
+ });
+
+ describe('without a tag URL', () => {
+ beforeEach(() => factory({ tagPath: undefined }));
+
+ it('renders the tag name as plain text (instead of a link)', () => {
+ expect(tagInfoSectionLink().exists()).toBe(false);
+ expect(tagInfoSection().text()).toBe(releaseClone.tag_name);
+ });
+ });
+
+ describe('without any author info', () => {
+ beforeEach(() => factory({ author: undefined }));
+
+ it('renders the release date without the author name', () => {
+ expect(trimText(authorDateInfoSection().text())).toBe('Created 7 fortnightes ago');
+ });
+ });
+
+ describe('without a released at date', () => {
+ beforeEach(() => factory({ releasedAt: undefined }));
+
+ it('renders the author name without the release date', () => {
+ expect(trimText(authorDateInfoSection().text())).toBe(
+ `Created by ${releaseClone.author.username}`,
+ );
+ });
+ });
+
+ describe('without a release date or author info', () => {
+ beforeEach(() => factory({ author: undefined, releasedAt: undefined }));
+
+ it('does not render any author or release date info', () => {
+ expect(authorDateInfoSection().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/releases/list/components/release_block_spec.js b/spec/frontend/releases/list/components/release_block_spec.js
index ac51c3af11a..b63ef068d8e 100644
--- a/spec/frontend/releases/list/components/release_block_spec.js
+++ b/spec/frontend/releases/list/components/release_block_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import ReleaseBlock from '~/releases/list/components/release_block.vue';
+import ReleaseBlockFooter from '~/releases/list/components/release_block_footer.vue';
import timeagoMixin from '~/vue_shared/mixins/timeago';
import { first } from 'underscore';
import { release } from '../../mock_data';
@@ -21,14 +22,16 @@ describe('Release block', () => {
let wrapper;
let releaseClone;
- const factory = (releaseProp, releaseEditPageFeatureFlag = true) => {
+ const factory = (releaseProp, featureFlags = {}) => {
wrapper = mount(ReleaseBlock, {
propsData: {
release: releaseProp,
},
provide: {
glFeatures: {
- releaseEditPage: releaseEditPageFeatureFlag,
+ releaseEditPage: true,
+ releaseIssueSummary: true,
+ ...featureFlags,
},
},
sync: false,
@@ -39,41 +42,25 @@ describe('Release block', () => {
const milestoneListLabel = () => wrapper.find('.js-milestone-list-label');
const editButton = () => wrapper.find('.js-edit-button');
- const RealDate = Date;
beforeEach(() => {
- // timeago.js calls Date(), so let's mock that case to avoid time-dependent test failures.
- const constantDate = new Date('2019-10-25T00:12:00');
-
- /* eslint no-global-assign:off */
- global.Date = jest.fn((...props) =>
- props.length ? new RealDate(...props) : new RealDate(constantDate),
- );
-
- Object.assign(Date, RealDate);
-
releaseClone = JSON.parse(JSON.stringify(release));
});
afterEach(() => {
wrapper.destroy();
- global.Date = RealDate;
});
describe('with default props', () => {
beforeEach(() => factory(release));
- it('matches the snapshot', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
it("renders the block with an id equal to the release's tag name", () => {
expect(wrapper.attributes().id).toBe('v0.3');
});
it('renders an edit button that links to the "Edit release" page', () => {
expect(editButton().exists()).toBe(true);
- expect(editButton().attributes('href')).toBe(release._links.edit);
+ expect(editButton().attributes('href')).toBe(release._links.edit_url);
});
it('renders release name', () => {
@@ -158,6 +145,10 @@ describe('Release block', () => {
expect(milestoneLink.attributes('data-original-title')).toBe(milestone.description);
});
+
+ it('renders the footer', () => {
+ expect(wrapper.find(ReleaseBlockFooter).exists()).toBe(true);
+ });
});
it('renders commit sha', () => {
@@ -180,7 +171,7 @@ describe('Release block', () => {
});
});
- it("does not render an edit button if release._links.edit isn't a string", () => {
+ it("does not render an edit button if release._links.edit_url isn't a string", () => {
delete releaseClone._links;
return factory(releaseClone).then(() => {
@@ -189,7 +180,7 @@ describe('Release block', () => {
});
it('does not render an edit button if the releaseEditPage feature flag is disabled', () =>
- factory(releaseClone, false).then(() => {
+ factory(releaseClone, { releaseEditPage: false }).then(() => {
expect(editButton().exists()).toBe(false);
}));
diff --git a/spec/frontend/releases/mock_data.js b/spec/frontend/releases/mock_data.js
index b2ebf1174d4..61d95b86b1c 100644
--- a/spec/frontend/releases/mock_data.js
+++ b/spec/frontend/releases/mock_data.js
@@ -30,6 +30,7 @@ export const milestones = [
export const release = {
name: 'New release',
tag_name: 'v0.3',
+ tag_path: '/root/release-test/-/tags/v0.3',
description: 'A super nice release!',
description_html: '<p data-sourcepos="1:1-1:21" dir="auto">A super nice release!</p>',
created_at: '2019-08-26T17:54:04.952Z',
@@ -56,6 +57,7 @@ export const release = {
committer_email: 'admin@example.com',
committed_date: '2019-08-26T17:47:07.000Z',
},
+ commit_path: '/root/release-test/commit/c22b0728d1b465f82898c884d32b01aa642f96c1',
upcoming_release: false,
milestones,
assets: {
@@ -95,6 +97,6 @@ export const release = {
],
},
_links: {
- edit: 'http://0.0.0.0:3001/root/release-test/-/releases/v0.3/edit',
+ edit_url: 'http://0.0.0.0:3001/root/release-test/-/releases/v0.3/edit',
},
};
diff --git a/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap b/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
new file mode 100644
index 00000000000..31a1cd23060
--- /dev/null
+++ b/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
@@ -0,0 +1,75 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Repository directory download links component renders downloads links for path app 1`] = `
+<section
+ class="border-top pt-1 mt-1"
+>
+ <h5
+ class="m-0 dropdown-bold-header"
+ >
+ Download this directory
+ </h5>
+
+ <div
+ class="dropdown-menu-content"
+ >
+ <div
+ class="btn-group ml-0 w-100"
+ >
+ <gllink-stub
+ class="btn btn-xs btn-primary"
+ href="http://test.com/?path=app"
+ >
+
+ zip
+
+ </gllink-stub>
+ <gllink-stub
+ class="btn btn-xs"
+ href="http://test.com/?path=app"
+ >
+
+ tar
+
+ </gllink-stub>
+ </div>
+ </div>
+</section>
+`;
+
+exports[`Repository directory download links component renders downloads links for path app/assets 1`] = `
+<section
+ class="border-top pt-1 mt-1"
+>
+ <h5
+ class="m-0 dropdown-bold-header"
+ >
+ Download this directory
+ </h5>
+
+ <div
+ class="dropdown-menu-content"
+ >
+ <div
+ class="btn-group ml-0 w-100"
+ >
+ <gllink-stub
+ class="btn btn-xs btn-primary"
+ href="http://test.com/?path=app/assets"
+ >
+
+ zip
+
+ </gllink-stub>
+ <gllink-stub
+ class="btn btn-xs"
+ href="http://test.com/?path=app/assets"
+ >
+
+ tar
+
+ </gllink-stub>
+ </div>
+ </div>
+</section>
+`;
diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
index 08173f4f0c4..706c26403c0 100644
--- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
@@ -62,19 +62,23 @@ exports[`Repository last commit component renders commit widget 1`] = `
>
<!---->
- <gllink-stub
- class="js-commit-pipeline"
- data-original-title="Commit: failed"
- href="https://test.com/pipeline"
- title=""
+ <div
+ class="ci-status-link"
>
- <ciicon-stub
- aria-label="Commit: failed"
- cssclasses=""
- size="24"
- status="[object Object]"
- />
- </gllink-stub>
+ <gllink-stub
+ class="js-commit-pipeline"
+ data-original-title="Commit: failed"
+ href="https://test.com/pipeline"
+ title=""
+ >
+ <ciicon-stub
+ aria-label="Commit: failed"
+ cssclasses=""
+ size="24"
+ status="[object Object]"
+ />
+ </gllink-stub>
+ </div>
<div
class="commit-sha-group d-flex"
@@ -165,19 +169,23 @@ exports[`Repository last commit component renders the signature HTML as returned
</button>
</div>
- <gllink-stub
- class="js-commit-pipeline"
- data-original-title="Commit: failed"
- href="https://test.com/pipeline"
- title=""
+ <div
+ class="ci-status-link"
>
- <ciicon-stub
- aria-label="Commit: failed"
- cssclasses=""
- size="24"
- status="[object Object]"
- />
- </gllink-stub>
+ <gllink-stub
+ class="js-commit-pipeline"
+ data-original-title="Commit: failed"
+ href="https://test.com/pipeline"
+ title=""
+ >
+ <ciicon-stub
+ aria-label="Commit: failed"
+ cssclasses=""
+ size="24"
+ status="[object Object]"
+ />
+ </gllink-stub>
+ </div>
<div
class="commit-sha-group d-flex"
diff --git a/spec/frontend/repository/components/directory_download_links_spec.js b/spec/frontend/repository/components/directory_download_links_spec.js
new file mode 100644
index 00000000000..4d70b44de08
--- /dev/null
+++ b/spec/frontend/repository/components/directory_download_links_spec.js
@@ -0,0 +1,29 @@
+import { shallowMount } from '@vue/test-utils';
+import DirectoryDownloadLinks from '~/repository/components/directory_download_links.vue';
+
+let vm;
+
+function factory(currentPath) {
+ vm = shallowMount(DirectoryDownloadLinks, {
+ propsData: {
+ currentPath,
+ links: [{ text: 'zip', path: 'http://test.com/' }, { text: 'tar', path: 'http://test.com/' }],
+ },
+ });
+}
+
+describe('Repository directory download links component', () => {
+ afterEach(() => {
+ vm.destroy();
+ });
+
+ it.each`
+ path
+ ${'app'}
+ ${'app/assets'}
+ `('renders downloads links for path $path', ({ path }) => {
+ factory(path);
+
+ expect(vm.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/repository/components/last_commit_spec.js b/spec/frontend/repository/components/last_commit_spec.js
index 01b56d453e6..e07ad4cf46b 100644
--- a/spec/frontend/repository/components/last_commit_spec.js
+++ b/spec/frontend/repository/components/last_commit_spec.js
@@ -17,7 +17,7 @@ function createCommitData(data = {}) {
avatarUrl: 'https://test.com',
webUrl: 'https://test.com/test',
},
- latestPipeline: {
+ pipeline: {
detailedStatus: {
detailsPath: 'https://test.com/pipeline',
icon: 'failed',
@@ -74,7 +74,7 @@ describe('Repository last commit component', () => {
});
it('hides pipeline components when pipeline does not exist', () => {
- factory(createCommitData({ latestPipeline: null }));
+ factory(createCommitData({ pipeline: null }));
expect(vm.find('.js-commit-pipeline').exists()).toBe(false);
});
diff --git a/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap b/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap
new file mode 100644
index 00000000000..a5e3eb4bce1
--- /dev/null
+++ b/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap
@@ -0,0 +1,36 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Repository file preview component renders file HTML 1`] = `
+<article
+ class="file-holder limited-width-container readme-holder"
+>
+ <div
+ class="file-title"
+ >
+ <i
+ aria-hidden="true"
+ class="fa fa-file-text-o fa-fw"
+ />
+
+ <gllink-stub
+ href="http://test.com"
+ >
+ <strong>
+ README.md
+ </strong>
+ </gllink-stub>
+ </div>
+
+ <div
+ class="blob-viewer"
+ >
+ <div>
+ <div
+ class="blob"
+ >
+ test
+ </div>
+ </div>
+ </div>
+</article>
+`;
diff --git a/spec/frontend/repository/components/preview/index_spec.js b/spec/frontend/repository/components/preview/index_spec.js
new file mode 100644
index 00000000000..0112e6310f4
--- /dev/null
+++ b/spec/frontend/repository/components/preview/index_spec.js
@@ -0,0 +1,49 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import Preview from '~/repository/components/preview/index.vue';
+
+let vm;
+let $apollo;
+
+function factory(blob) {
+ $apollo = {
+ query: jest.fn().mockReturnValue(Promise.resolve({})),
+ };
+
+ vm = shallowMount(Preview, {
+ propsData: {
+ blob,
+ },
+ mocks: {
+ $apollo,
+ },
+ });
+}
+
+describe('Repository file preview component', () => {
+ afterEach(() => {
+ vm.destroy();
+ });
+
+ it('renders file HTML', () => {
+ factory({
+ webUrl: 'http://test.com',
+ name: 'README.md',
+ });
+
+ vm.setData({ readme: { html: '<div class="blob">test</div>' } });
+
+ expect(vm.element).toMatchSnapshot();
+ });
+
+ it('renders loading icon', () => {
+ factory({
+ webUrl: 'http://test.com',
+ name: 'README.md',
+ });
+
+ vm.setData({ loading: 1 });
+
+ expect(vm.find(GlLoadingIcon).exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
index d55dc553031..f8e65a51297 100644
--- a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
+++ b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
@@ -25,6 +25,8 @@ exports[`Repository table row component renders table row 1`] = `
<!---->
<!---->
+
+ <!---->
</td>
<td
diff --git a/spec/frontend/repository/components/table/index_spec.js b/spec/frontend/repository/components/table/index_spec.js
index 827927e6d9a..41450becabb 100644
--- a/spec/frontend/repository/components/table/index_spec.js
+++ b/spec/frontend/repository/components/table/index_spec.js
@@ -1,18 +1,36 @@
import { shallowMount } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlSkeletonLoading } from '@gitlab/ui';
import Table from '~/repository/components/table/index.vue';
+import TableRow from '~/repository/components/table/row.vue';
let vm;
let $apollo;
-function factory(path, data = () => ({})) {
- $apollo = {
- query: jest.fn().mockReturnValue(Promise.resolve({ data: data() })),
- };
-
+const MOCK_BLOBS = [
+ {
+ id: '123abc',
+ sha: '123abc',
+ flatPath: 'blob',
+ name: 'blob.md',
+ type: 'blob',
+ webUrl: 'http://test.com',
+ },
+ {
+ id: '124abc',
+ sha: '124abc',
+ flatPath: 'blob2',
+ name: 'blob2.md',
+ type: 'blob',
+ webUrl: 'http://test.com',
+ },
+];
+
+function factory({ path, isLoading = false, entries = {} }) {
vm = shallowMount(Table, {
propsData: {
path,
+ isLoading,
+ entries,
},
mocks: {
$apollo,
@@ -31,50 +49,30 @@ describe('Repository table component', () => {
${'app/assets'} | ${'master'}
${'/'} | ${'test'}
`('renders table caption for $ref in $path', ({ path, ref }) => {
- factory(path);
+ factory({ path });
vm.setData({ ref });
- expect(vm.find('caption').text()).toEqual(
+ expect(vm.find('.table').attributes('aria-label')).toEqual(
`Files, directories, and submodules in the path ${path} for commit reference ${ref}`,
);
});
it('shows loading icon', () => {
- factory('/');
-
- vm.setData({ isLoadingFiles: true });
+ factory({ path: '/', isLoading: true });
- expect(vm.find(GlLoadingIcon).isVisible()).toBe(true);
+ expect(vm.find(GlSkeletonLoading).exists()).toBe(true);
});
- describe('normalizeData', () => {
- it('normalizes edge nodes', () => {
- const output = vm.vm.normalizeData('blobs', [{ node: '1' }, { node: '2' }]);
-
- expect(output).toEqual(['1', '2']);
+ it('renders table rows', () => {
+ factory({
+ path: '/',
+ entries: {
+ blobs: MOCK_BLOBS,
+ },
});
- });
-
- describe('hasNextPage', () => {
- it('returns undefined when hasNextPage is false', () => {
- const output = vm.vm.hasNextPage({
- trees: { pageInfo: { hasNextPage: false } },
- submodules: { pageInfo: { hasNextPage: false } },
- blobs: { pageInfo: { hasNextPage: false } },
- });
- expect(output).toBe(undefined);
- });
-
- it('returns pageInfo object when hasNextPage is true', () => {
- const output = vm.vm.hasNextPage({
- trees: { pageInfo: { hasNextPage: false } },
- submodules: { pageInfo: { hasNextPage: false } },
- blobs: { pageInfo: { hasNextPage: true, nextCursor: 'test' } },
- });
-
- expect(output).toEqual({ hasNextPage: true, nextCursor: 'test' });
- });
+ expect(vm.find(TableRow).exists()).toBe(true);
+ expect(vm.findAll(TableRow).length).toBe(2);
});
});
diff --git a/spec/frontend/repository/components/table/row_spec.js b/spec/frontend/repository/components/table/row_spec.js
index e539c560975..aa0b9385f1a 100644
--- a/spec/frontend/repository/components/table/row_spec.js
+++ b/spec/frontend/repository/components/table/row_spec.js
@@ -2,6 +2,7 @@ import { shallowMount, RouterLinkStub } from '@vue/test-utils';
import { GlBadge, GlLink } from '@gitlab/ui';
import { visitUrl } from '~/lib/utils/url_utility';
import TableRow from '~/repository/components/table/row.vue';
+import Icon from '~/vue_shared/components/icon.vue';
jest.mock('~/lib/utils/url_utility');
@@ -40,6 +41,7 @@ describe('Repository table row component', () => {
it('renders table row', () => {
factory({
id: '1',
+ sha: '123',
path: 'test',
type: 'file',
currentPath: '/',
@@ -56,6 +58,7 @@ describe('Repository table row component', () => {
`('renders a $componentName for type $type', ({ type, component }) => {
factory({
id: '1',
+ sha: '123',
path: 'test',
type,
currentPath: '/',
@@ -72,6 +75,7 @@ describe('Repository table row component', () => {
`('pushes new router if type $type is tree', ({ type, pushes }) => {
factory({
id: '1',
+ sha: '123',
path: 'test',
type,
currentPath: '/',
@@ -94,6 +98,7 @@ describe('Repository table row component', () => {
`('calls visitUrl if $type is not tree', ({ type, pushes }) => {
factory({
id: '1',
+ sha: '123',
path: 'test',
type,
currentPath: '/',
@@ -104,13 +109,14 @@ describe('Repository table row component', () => {
if (pushes) {
expect(visitUrl).not.toHaveBeenCalled();
} else {
- expect(visitUrl).toHaveBeenCalledWith('https://test.com');
+ expect(visitUrl).toHaveBeenCalledWith('https://test.com', undefined);
}
});
it('renders commit ID for submodule', () => {
factory({
id: '1',
+ sha: '123',
path: 'test',
type: 'commit',
currentPath: '/',
@@ -122,6 +128,7 @@ describe('Repository table row component', () => {
it('renders link with href', () => {
factory({
id: '1',
+ sha: '123',
path: 'test',
type: 'blob',
url: 'https://test.com',
@@ -134,6 +141,7 @@ describe('Repository table row component', () => {
it('renders LFS badge', () => {
factory({
id: '1',
+ sha: '123',
path: 'test',
type: 'commit',
currentPath: '/',
@@ -146,6 +154,7 @@ describe('Repository table row component', () => {
it('renders commit and web links with href for submodule', () => {
factory({
id: '1',
+ sha: '123',
path: 'test',
type: 'commit',
url: 'https://test.com',
@@ -156,4 +165,18 @@ describe('Repository table row component', () => {
expect(vm.find('a').attributes('href')).toEqual('https://test.com');
expect(vm.find(GlLink).attributes('href')).toEqual('https://test.com/commit');
});
+
+ it('renders lock icon', () => {
+ factory({
+ id: '1',
+ sha: '123',
+ path: 'test',
+ type: 'tree',
+ currentPath: '/',
+ });
+
+ vm.setData({ commit: { lockLabel: 'Locked by Root', committedDate: '2019-01-01' } });
+
+ expect(vm.find(Icon).exists()).toBe(true);
+ });
});
diff --git a/spec/frontend/repository/components/tree_content_spec.js b/spec/frontend/repository/components/tree_content_spec.js
new file mode 100644
index 00000000000..148e307a5d4
--- /dev/null
+++ b/spec/frontend/repository/components/tree_content_spec.js
@@ -0,0 +1,71 @@
+import { shallowMount } from '@vue/test-utils';
+import TreeContent from '~/repository/components/tree_content.vue';
+import FilePreview from '~/repository/components/preview/index.vue';
+
+let vm;
+let $apollo;
+
+function factory(path, data = () => ({})) {
+ $apollo = {
+ query: jest.fn().mockReturnValue(Promise.resolve({ data: data() })),
+ };
+
+ vm = shallowMount(TreeContent, {
+ propsData: {
+ path,
+ },
+ mocks: {
+ $apollo,
+ },
+ });
+}
+
+describe('Repository table component', () => {
+ afterEach(() => {
+ vm.destroy();
+ });
+
+ it('renders file preview', () => {
+ factory('/');
+
+ vm.setData({ entries: { blobs: [{ name: 'README.md' }] } });
+
+ expect(vm.find(FilePreview).exists()).toBe(true);
+ });
+
+ describe('normalizeData', () => {
+ it('normalizes edge nodes', () => {
+ factory('/');
+
+ const output = vm.vm.normalizeData('blobs', [{ node: '1' }, { node: '2' }]);
+
+ expect(output).toEqual(['1', '2']);
+ });
+ });
+
+ describe('hasNextPage', () => {
+ it('returns undefined when hasNextPage is false', () => {
+ factory('/');
+
+ const output = vm.vm.hasNextPage({
+ trees: { pageInfo: { hasNextPage: false } },
+ submodules: { pageInfo: { hasNextPage: false } },
+ blobs: { pageInfo: { hasNextPage: false } },
+ });
+
+ expect(output).toBe(undefined);
+ });
+
+ it('returns pageInfo object when hasNextPage is true', () => {
+ factory('/');
+
+ const output = vm.vm.hasNextPage({
+ trees: { pageInfo: { hasNextPage: false } },
+ submodules: { pageInfo: { hasNextPage: false } },
+ blobs: { pageInfo: { hasNextPage: true, nextCursor: 'test' } },
+ });
+
+ expect(output).toEqual({ hasNextPage: true, nextCursor: 'test' });
+ });
+ });
+});
diff --git a/spec/frontend/repository/log_tree_spec.js b/spec/frontend/repository/log_tree_spec.js
index a3a766eca41..9199c726680 100644
--- a/spec/frontend/repository/log_tree_spec.js
+++ b/spec/frontend/repository/log_tree_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import { normalizeData, resolveCommit, fetchLogsTree } from '~/repository/log_tree';
+import { resolveCommit, fetchLogsTree } from '~/repository/log_tree';
const mockData = [
{
@@ -15,22 +15,6 @@ const mockData = [
},
];
-describe('normalizeData', () => {
- it('normalizes data into LogTreeCommit object', () => {
- expect(normalizeData(mockData)).toEqual([
- {
- sha: '123',
- message: 'testing message',
- committedDate: '2019-01-01',
- commitPath: 'https://test.com',
- fileName: 'index.js',
- type: 'blob',
- __typename: 'LogTreeCommit',
- },
- ]);
- });
-});
-
describe('resolveCommit', () => {
it('calls resolve when commit found', () => {
const resolver = {
@@ -57,7 +41,7 @@ describe('fetchLogsTree', () => {
jest.spyOn(axios, 'get');
- global.gon = { gitlab_url: 'https://test.com' };
+ global.gon = { relative_url_root: '' };
client = {
readQuery: () => ({
@@ -80,10 +64,9 @@ describe('fetchLogsTree', () => {
it('calls axios get', () =>
fetchLogsTree(client, '', '0', resolver).then(() => {
- expect(axios.get).toHaveBeenCalledWith(
- 'https://test.com/gitlab-org/gitlab-foss/refs/master/logs_tree',
- { params: { format: 'json', offset: '0' } },
- );
+ expect(axios.get).toHaveBeenCalledWith('/gitlab-org/gitlab-foss/refs/master/logs_tree/', {
+ params: { format: 'json', offset: '0' },
+ });
}));
it('calls axios get once', () =>
diff --git a/spec/frontend/repository/pages/index_spec.js b/spec/frontend/repository/pages/index_spec.js
new file mode 100644
index 00000000000..c0afb7931b1
--- /dev/null
+++ b/spec/frontend/repository/pages/index_spec.js
@@ -0,0 +1,42 @@
+import { shallowMount } from '@vue/test-utils';
+import IndexPage from '~/repository/pages/index.vue';
+import TreePage from '~/repository/pages/tree.vue';
+import { updateElementsVisibility } from '~/repository/utils/dom';
+
+jest.mock('~/repository/utils/dom');
+
+describe('Repository index page component', () => {
+ let wrapper;
+
+ function factory() {
+ wrapper = shallowMount(IndexPage);
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+
+ updateElementsVisibility.mockClear();
+ });
+
+ it('calls updateElementsVisibility on mounted', () => {
+ factory();
+
+ expect(updateElementsVisibility).toHaveBeenCalledWith('.js-show-on-project-root', true);
+ });
+
+ it('calls updateElementsVisibility after destroy', () => {
+ factory();
+ wrapper.destroy();
+
+ expect(updateElementsVisibility.mock.calls.pop()).toEqual(['.js-show-on-project-root', false]);
+ });
+
+ it('renders TreePage', () => {
+ factory();
+
+ const child = wrapper.find(TreePage);
+
+ expect(child.exists()).toBe(true);
+ expect(child.props()).toEqual({ path: '/' });
+ });
+});
diff --git a/spec/frontend/repository/pages/tree_spec.js b/spec/frontend/repository/pages/tree_spec.js
new file mode 100644
index 00000000000..36662696c91
--- /dev/null
+++ b/spec/frontend/repository/pages/tree_spec.js
@@ -0,0 +1,60 @@
+import { shallowMount } from '@vue/test-utils';
+import TreePage from '~/repository/pages/tree.vue';
+import { updateElementsVisibility } from '~/repository/utils/dom';
+
+jest.mock('~/repository/utils/dom');
+
+describe('Repository tree page component', () => {
+ let wrapper;
+
+ function factory(path) {
+ wrapper = shallowMount(TreePage, { propsData: { path } });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+
+ updateElementsVisibility.mockClear();
+ });
+
+ describe('when root path', () => {
+ beforeEach(() => {
+ factory('/');
+ });
+
+ it('shows root elements', () => {
+ expect(updateElementsVisibility.mock.calls).toEqual([
+ ['.js-show-on-root', true],
+ ['.js-hide-on-root', false],
+ ]);
+ });
+
+ describe('when changed', () => {
+ beforeEach(() => {
+ updateElementsVisibility.mockClear();
+
+ wrapper.setProps({ path: '/test' });
+ });
+
+ it('hides root elements', () => {
+ expect(updateElementsVisibility.mock.calls).toEqual([
+ ['.js-show-on-root', false],
+ ['.js-hide-on-root', true],
+ ]);
+ });
+ });
+ });
+
+ describe('when non-root path', () => {
+ beforeEach(() => {
+ factory('/test');
+ });
+
+ it('hides root elements', () => {
+ expect(updateElementsVisibility.mock.calls).toEqual([
+ ['.js-show-on-root', false],
+ ['.js-hide-on-root', true],
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/repository/utils/commit_spec.js b/spec/frontend/repository/utils/commit_spec.js
new file mode 100644
index 00000000000..2d75358106c
--- /dev/null
+++ b/spec/frontend/repository/utils/commit_spec.js
@@ -0,0 +1,30 @@
+import { normalizeData } from '~/repository/utils/commit';
+
+const mockData = [
+ {
+ commit: {
+ id: '123',
+ message: 'testing message',
+ committed_date: '2019-01-01',
+ },
+ commit_path: `https://test.com`,
+ file_name: 'index.js',
+ type: 'blob',
+ },
+];
+
+describe('normalizeData', () => {
+ it('normalizes data into LogTreeCommit object', () => {
+ expect(normalizeData(mockData)).toEqual([
+ {
+ sha: '123',
+ message: 'testing message',
+ committedDate: '2019-01-01',
+ commitPath: 'https://test.com',
+ fileName: 'index.js',
+ type: 'blob',
+ __typename: 'LogTreeCommit',
+ },
+ ]);
+ });
+});
diff --git a/spec/frontend/repository/utils/dom_spec.js b/spec/frontend/repository/utils/dom_spec.js
new file mode 100644
index 00000000000..678d444904d
--- /dev/null
+++ b/spec/frontend/repository/utils/dom_spec.js
@@ -0,0 +1,20 @@
+import { setHTMLFixture } from '../../helpers/fixtures';
+import { updateElementsVisibility } from '~/repository/utils/dom';
+
+describe('updateElementsVisibility', () => {
+ it('adds hidden class', () => {
+ setHTMLFixture('<div class="js-test"></div>');
+
+ updateElementsVisibility('.js-test', false);
+
+ expect(document.querySelector('.js-test').classList).toContain('hidden');
+ });
+
+ it('removes hidden class', () => {
+ setHTMLFixture('<div class="hidden js-test"></div>');
+
+ updateElementsVisibility('.js-test', true);
+
+ expect(document.querySelector('.js-test').classList).not.toContain('hidden');
+ });
+});
diff --git a/spec/frontend/repository/utils/readme_spec.js b/spec/frontend/repository/utils/readme_spec.js
new file mode 100644
index 00000000000..6b7876c8947
--- /dev/null
+++ b/spec/frontend/repository/utils/readme_spec.js
@@ -0,0 +1,33 @@
+import { readmeFile } from '~/repository/utils/readme';
+
+describe('readmeFile', () => {
+ describe('markdown files', () => {
+ it('returns markdown file', () => {
+ expect(readmeFile([{ name: 'README' }, { name: 'README.md' }])).toEqual({
+ name: 'README.md',
+ });
+
+ expect(readmeFile([{ name: 'README' }, { name: 'index.md' }])).toEqual({
+ name: 'index.md',
+ });
+ });
+ });
+
+ describe('plain files', () => {
+ it('returns plain file', () => {
+ expect(readmeFile([{ name: 'README' }, { name: 'TEST.md' }])).toEqual({
+ name: 'README',
+ });
+
+ expect(readmeFile([{ name: 'readme' }, { name: 'TEST.md' }])).toEqual({
+ name: 'readme',
+ });
+ });
+ });
+
+ describe('non-previewable file', () => {
+ it('returns undefined', () => {
+ expect(readmeFile([{ name: 'index.js' }, { name: 'TEST.md' }])).toBe(undefined);
+ });
+ });
+});
diff --git a/spec/frontend/repository/utils/title_spec.js b/spec/frontend/repository/utils/title_spec.js
index c4879716fd7..63035933424 100644
--- a/spec/frontend/repository/utils/title_spec.js
+++ b/spec/frontend/repository/utils/title_spec.js
@@ -8,8 +8,8 @@ describe('setTitle', () => {
${'app/assets'} | ${'app/assets'}
${'app/assets/javascripts'} | ${'app/assets/javascripts'}
`('sets document title as $title for $path', ({ path, title }) => {
- setTitle(path, 'master', 'GitLab');
+ setTitle(path, 'master', 'GitLab Org / GitLab');
- expect(document.title).toEqual(`${title} · master · GitLab`);
+ expect(document.title).toEqual(`${title} · master · GitLab Org / GitLab · GitLab`);
});
});
diff --git a/spec/javascripts/raven/index_spec.js b/spec/frontend/sentry/index_spec.js
index 6b9fe923624..82b6c445d96 100644
--- a/spec/javascripts/raven/index_spec.js
+++ b/spec/frontend/sentry/index_spec.js
@@ -1,8 +1,8 @@
-import RavenConfig from '~/raven/raven_config';
-import index from '~/raven/index';
+import SentryConfig from '~/sentry/sentry_config';
+import index from '~/sentry/index';
-describe('RavenConfig options', () => {
- const sentryDsn = 'sentryDsn';
+describe('SentryConfig options', () => {
+ const dsn = 'https://123@sentry.gitlab.test/123';
const currentUserId = 'currentUserId';
const gitlabUrl = 'gitlabUrl';
const environment = 'test';
@@ -11,7 +11,7 @@ describe('RavenConfig options', () => {
beforeEach(() => {
window.gon = {
- sentry_dsn: sentryDsn,
+ sentry_dsn: dsn,
sentry_environment: environment,
current_user_id: currentUserId,
gitlab_url: gitlabUrl,
@@ -20,14 +20,14 @@ describe('RavenConfig options', () => {
process.env.HEAD_COMMIT_SHA = revision;
- spyOn(RavenConfig, 'init');
+ jest.spyOn(SentryConfig, 'init').mockImplementation();
indexReturnValue = index();
});
it('should init with .sentryDsn, .currentUserId, .whitelistUrls and environment', () => {
- expect(RavenConfig.init).toHaveBeenCalledWith({
- sentryDsn,
+ expect(SentryConfig.init).toHaveBeenCalledWith({
+ dsn,
currentUserId,
whitelistUrls: [gitlabUrl, 'webpack-internal://'],
environment,
@@ -38,7 +38,7 @@ describe('RavenConfig options', () => {
});
});
- it('should return RavenConfig', () => {
- expect(indexReturnValue).toBe(RavenConfig);
+ it('should return SentryConfig', () => {
+ expect(indexReturnValue).toBe(SentryConfig);
});
});
diff --git a/spec/frontend/sentry/sentry_config_spec.js b/spec/frontend/sentry/sentry_config_spec.js
new file mode 100644
index 00000000000..62b8bbd50a2
--- /dev/null
+++ b/spec/frontend/sentry/sentry_config_spec.js
@@ -0,0 +1,214 @@
+import * as Sentry from '@sentry/browser';
+import SentryConfig from '~/sentry/sentry_config';
+
+describe('SentryConfig', () => {
+ describe('IGNORE_ERRORS', () => {
+ it('should be an array of strings', () => {
+ const areStrings = SentryConfig.IGNORE_ERRORS.every(error => typeof error === 'string');
+
+ expect(areStrings).toBe(true);
+ });
+ });
+
+ describe('BLACKLIST_URLS', () => {
+ it('should be an array of regexps', () => {
+ const areRegExps = SentryConfig.BLACKLIST_URLS.every(url => url instanceof RegExp);
+
+ expect(areRegExps).toBe(true);
+ });
+ });
+
+ describe('SAMPLE_RATE', () => {
+ it('should be a finite number', () => {
+ expect(typeof SentryConfig.SAMPLE_RATE).toEqual('number');
+ });
+ });
+
+ describe('init', () => {
+ const options = {
+ currentUserId: 1,
+ };
+
+ beforeEach(() => {
+ jest.spyOn(SentryConfig, 'configure');
+ jest.spyOn(SentryConfig, 'bindSentryErrors');
+ jest.spyOn(SentryConfig, 'setUser');
+
+ SentryConfig.init(options);
+ });
+
+ it('should set the options property', () => {
+ expect(SentryConfig.options).toEqual(options);
+ });
+
+ it('should call the configure method', () => {
+ expect(SentryConfig.configure).toHaveBeenCalled();
+ });
+
+ it('should call the error bindings method', () => {
+ expect(SentryConfig.bindSentryErrors).toHaveBeenCalled();
+ });
+
+ it('should call setUser', () => {
+ expect(SentryConfig.setUser).toHaveBeenCalled();
+ });
+
+ it('should not call setUser if there is no current user ID', () => {
+ jest.clearAllMocks();
+
+ options.currentUserId = undefined;
+
+ SentryConfig.init(options);
+
+ expect(SentryConfig.setUser).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('configure', () => {
+ const sentryConfig = {};
+ const options = {
+ dsn: 'https://123@sentry.gitlab.test/123',
+ whitelistUrls: ['//gitlabUrl', 'webpack-internal://'],
+ environment: 'test',
+ release: 'revision',
+ tags: {
+ revision: 'revision',
+ },
+ };
+
+ beforeEach(() => {
+ jest.spyOn(Sentry, 'init').mockImplementation();
+
+ sentryConfig.options = options;
+ sentryConfig.IGNORE_ERRORS = 'ignore_errors';
+ sentryConfig.BLACKLIST_URLS = 'blacklist_urls';
+
+ SentryConfig.configure.call(sentryConfig);
+ });
+
+ it('should call Sentry.init', () => {
+ expect(Sentry.init).toHaveBeenCalledWith({
+ dsn: options.dsn,
+ release: options.release,
+ tags: options.tags,
+ sampleRate: 0.95,
+ whitelistUrls: options.whitelistUrls,
+ environment: 'test',
+ ignoreErrors: sentryConfig.IGNORE_ERRORS,
+ blacklistUrls: sentryConfig.BLACKLIST_URLS,
+ });
+ });
+
+ it('should set environment from options', () => {
+ sentryConfig.options.environment = 'development';
+
+ SentryConfig.configure.call(sentryConfig);
+
+ expect(Sentry.init).toHaveBeenCalledWith({
+ dsn: options.dsn,
+ release: options.release,
+ tags: options.tags,
+ sampleRate: 0.95,
+ whitelistUrls: options.whitelistUrls,
+ environment: 'development',
+ ignoreErrors: sentryConfig.IGNORE_ERRORS,
+ blacklistUrls: sentryConfig.BLACKLIST_URLS,
+ });
+ });
+ });
+
+ describe('setUser', () => {
+ let sentryConfig;
+
+ beforeEach(() => {
+ sentryConfig = { options: { currentUserId: 1 } };
+ jest.spyOn(Sentry, 'setUser');
+
+ SentryConfig.setUser.call(sentryConfig);
+ });
+
+ it('should call .setUser', () => {
+ expect(Sentry.setUser).toHaveBeenCalledWith({
+ id: sentryConfig.options.currentUserId,
+ });
+ });
+ });
+
+ describe('handleSentryErrors', () => {
+ let event;
+ let req;
+ let config;
+ let err;
+
+ beforeEach(() => {
+ event = {};
+ req = { status: 'status', responseText: 'Unknown response text', statusText: 'statusText' };
+ config = { type: 'type', url: 'url', data: 'data' };
+ err = {};
+
+ jest.spyOn(Sentry, 'captureMessage');
+
+ SentryConfig.handleSentryErrors(event, req, config, err);
+ });
+
+ it('should call Sentry.captureMessage', () => {
+ expect(Sentry.captureMessage).toHaveBeenCalledWith(err, {
+ extra: {
+ type: config.type,
+ url: config.url,
+ data: config.data,
+ status: req.status,
+ response: req.responseText,
+ error: err,
+ event,
+ },
+ });
+ });
+
+ describe('if no err is provided', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+
+ SentryConfig.handleSentryErrors(event, req, config);
+ });
+
+ it('should use req.statusText as the error value', () => {
+ expect(Sentry.captureMessage).toHaveBeenCalledWith(req.statusText, {
+ extra: {
+ type: config.type,
+ url: config.url,
+ data: config.data,
+ status: req.status,
+ response: req.responseText,
+ error: req.statusText,
+ event,
+ },
+ });
+ });
+ });
+
+ describe('if no req.responseText is provided', () => {
+ beforeEach(() => {
+ req.responseText = undefined;
+
+ jest.clearAllMocks();
+
+ SentryConfig.handleSentryErrors(event, req, config, err);
+ });
+
+ it('should use `Unknown response text` as the response', () => {
+ expect(Sentry.captureMessage).toHaveBeenCalledWith(err, {
+ extra: {
+ type: config.type,
+ url: config.url,
+ data: config.data,
+ status: req.status,
+ response: 'Unknown response text',
+ error: err,
+ event,
+ },
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
index 452d4cd07cc..d0d1af56872 100644
--- a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
+++ b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
@@ -24,6 +24,7 @@ describe('AssigneeAvatarLink component', () => {
};
wrapper = shallowMount(AssigneeAvatarLink, {
+ attachToDocument: true,
propsData,
sync: false,
});
diff --git a/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js b/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
index ff0c8d181b5..c88ae196875 100644
--- a/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
+++ b/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
@@ -16,6 +16,7 @@ describe('CollapsedAssigneeList component', () => {
};
wrapper = shallowMount(CollapsedAssigneeList, {
+ attachToDocument: true,
propsData,
sync: false,
});
diff --git a/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js b/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
index 6398351834c..1de21f30d21 100644
--- a/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
+++ b/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
@@ -18,6 +18,7 @@ describe('UncollapsedAssigneeList component', () => {
};
wrapper = mount(UncollapsedAssigneeList, {
+ attachToDocument: true,
sync: false,
propsData,
});
diff --git a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
new file mode 100644
index 00000000000..95296de5a5d
--- /dev/null
+++ b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
@@ -0,0 +1,37 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`SplitButton renders actionItems 1`] = `
+<gldropdown-stub
+ menu-class="dropdown-menu-selectable "
+ split="true"
+ text="professor"
+>
+ <gldropdownitem-stub
+ active="true"
+ active-class="is-active"
+ >
+ <strong>
+ professor
+ </strong>
+
+ <div>
+ very symphonic
+ </div>
+ </gldropdownitem-stub>
+
+ <gldropdowndivider-stub />
+ <gldropdownitem-stub
+ active-class="is-active"
+ >
+ <strong>
+ captain
+ </strong>
+
+ <div>
+ warp drive
+ </div>
+ </gldropdownitem-stub>
+
+ <!---->
+</gldropdown-stub>
+`;
diff --git a/spec/javascripts/vue_shared/components/commit_spec.js b/spec/frontend/vue_shared/components/commit_spec.js
index f89627e727b..77d8e00cf00 100644
--- a/spec/javascripts/vue_shared/components/commit_spec.js
+++ b/spec/frontend/vue_shared/components/commit_spec.js
@@ -1,22 +1,27 @@
-import Vue from 'vue';
-import commitComp from '~/vue_shared/components/commit.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
+import { shallowMount } from '@vue/test-utils';
+import CommitComponent from '~/vue_shared/components/commit.vue';
+import Icon from '~/vue_shared/components/icon.vue';
+import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
describe('Commit component', () => {
let props;
- let component;
- let CommitComponent;
+ let wrapper;
- beforeEach(() => {
- CommitComponent = Vue.extend(commitComp);
- });
+ const findUserAvatar = () => wrapper.find(UserAvatarLink);
+
+ const createComponent = propsData => {
+ wrapper = shallowMount(CommitComponent, {
+ propsData,
+ sync: false,
+ });
+ };
afterEach(() => {
- component.$destroy();
+ wrapper.destroy();
});
it('should render a fork icon if it does not represent a tag', () => {
- component = mountComponent(CommitComponent, {
+ createComponent({
tag: false,
commitRef: {
name: 'master',
@@ -34,7 +39,12 @@ describe('Commit component', () => {
},
});
- expect(component.$el.querySelector('.icon-container').children).toContain('svg');
+ expect(
+ wrapper
+ .find('.icon-container')
+ .find(Icon)
+ .exists(),
+ ).toBe(true);
});
describe('Given all the props', () => {
@@ -56,68 +66,51 @@ describe('Commit component', () => {
username: 'jschatz1',
},
};
-
- component = mountComponent(CommitComponent, props);
+ createComponent(props);
});
it('should render a tag icon if it represents a tag', () => {
- expect(component.$el.querySelector('.icon-container svg.ic-tag')).not.toBeNull();
+ expect(wrapper.find('icon-stub[name="tag"]').exists()).toBe(true);
});
it('should render a link to the ref url', () => {
- expect(component.$el.querySelector('.ref-name').getAttribute('href')).toEqual(
- props.commitRef.ref_url,
- );
+ expect(wrapper.find('.ref-name').attributes('href')).toBe(props.commitRef.ref_url);
});
it('should render the ref name', () => {
- expect(component.$el.querySelector('.ref-name').textContent).toContain(props.commitRef.name);
+ expect(wrapper.find('.ref-name').text()).toContain(props.commitRef.name);
});
it('should render the commit short sha with a link to the commit url', () => {
- expect(component.$el.querySelector('.commit-sha').getAttribute('href')).toEqual(
- props.commitUrl,
- );
+ expect(wrapper.find('.commit-sha').attributes('href')).toEqual(props.commitUrl);
- expect(component.$el.querySelector('.commit-sha').textContent).toContain(props.shortSha);
+ expect(wrapper.find('.commit-sha').text()).toContain(props.shortSha);
});
it('should render icon for commit', () => {
- expect(
- component.$el.querySelector('.js-commit-icon use').getAttribute('xlink:href'),
- ).toContain('commit');
+ expect(wrapper.find('icon-stub[name="commit"]').exists()).toBe(true);
});
describe('Given commit title and author props', () => {
it('should render a link to the author profile', () => {
- expect(
- component.$el.querySelector('.commit-title .avatar-image-container').getAttribute('href'),
- ).toEqual(props.author.path);
+ const userAvatar = findUserAvatar();
+
+ expect(userAvatar.props('linkHref')).toBe(props.author.path);
});
it('Should render the author avatar with title and alt attributes', () => {
- expect(
- component.$el
- .querySelector('.commit-title .avatar-image-container .js-user-avatar-image-toolip')
- .textContent.trim(),
- ).toContain(props.author.username);
-
- expect(
- component.$el
- .querySelector('.commit-title .avatar-image-container img')
- .getAttribute('alt'),
- ).toContain(`${props.author.username}'s avatar`);
+ const userAvatar = findUserAvatar();
+
+ expect(userAvatar.exists()).toBe(true);
+
+ expect(userAvatar.props('imgAlt')).toBe(`${props.author.username}'s avatar`);
});
});
it('should render the commit title', () => {
- expect(component.$el.querySelector('a.commit-row-message').getAttribute('href')).toEqual(
- props.commitUrl,
- );
+ expect(wrapper.find('.commit-row-message').attributes('href')).toEqual(props.commitUrl);
- expect(component.$el.querySelector('a.commit-row-message').textContent).toContain(
- props.title,
- );
+ expect(wrapper.find('.commit-row-message').text()).toContain(props.title);
});
});
@@ -136,9 +129,9 @@ describe('Commit component', () => {
author: {},
};
- component = mountComponent(CommitComponent, props);
+ createComponent(props);
- expect(component.$el.querySelector('.commit-title span').textContent).toContain(
+ expect(wrapper.find('.commit-title span').text()).toContain(
"Can't find HEAD commit for this branch",
);
});
@@ -159,16 +152,16 @@ describe('Commit component', () => {
author: {},
};
- component = mountComponent(CommitComponent, props);
- const refEl = component.$el.querySelector('.ref-name');
+ createComponent(props);
+ const refEl = wrapper.find('.ref-name');
- expect(refEl.textContent).toContain('master');
+ expect(refEl.text()).toContain('master');
- expect(refEl.href).toBe(props.commitRef.ref_url);
+ expect(refEl.attributes('href')).toBe(props.commitRef.ref_url);
- expect(refEl.getAttribute('data-original-title')).toBe(props.commitRef.name);
+ expect(refEl.attributes('data-original-title')).toBe(props.commitRef.name);
- expect(component.$el.querySelector('.icon-container .ic-branch')).not.toBeNull();
+ expect(wrapper.find('icon-stub[name="branch"]').exists()).toBe(true);
});
});
@@ -192,16 +185,16 @@ describe('Commit component', () => {
author: {},
};
- component = mountComponent(CommitComponent, props);
- const refEl = component.$el.querySelector('.ref-name');
+ createComponent(props);
+ const refEl = wrapper.find('.ref-name');
- expect(refEl.textContent).toContain('1234');
+ expect(refEl.text()).toContain('1234');
- expect(refEl.href).toBe(props.mergeRequestRef.path);
+ expect(refEl.attributes('href')).toBe(props.mergeRequestRef.path);
- expect(refEl.getAttribute('data-original-title')).toBe(props.mergeRequestRef.title);
+ expect(refEl.attributes('data-original-title')).toBe(props.mergeRequestRef.title);
- expect(component.$el.querySelector('.icon-container .ic-git-merge')).not.toBeNull();
+ expect(wrapper.find('icon-stub[name="git-merge"]').exists()).toBe(true);
});
});
@@ -226,9 +219,9 @@ describe('Commit component', () => {
showRefInfo: false,
};
- component = mountComponent(CommitComponent, props);
+ createComponent(props);
- expect(component.$el.querySelector('.ref-name')).toBeNull();
+ expect(wrapper.find('.ref-name').exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/vue_shared/components/content_viewer/viewers/image_viewer_spec.js b/spec/frontend/vue_shared/components/content_viewer/viewers/image_viewer_spec.js
new file mode 100644
index 00000000000..3ad8f3aec7c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/content_viewer/viewers/image_viewer_spec.js
@@ -0,0 +1,45 @@
+import { shallowMount } from '@vue/test-utils';
+
+import ImageViewer from '~/vue_shared/components/content_viewer/viewers/image_viewer.vue';
+import { GREEN_BOX_IMAGE_URL } from 'spec/test_constants';
+
+describe('Image Viewer', () => {
+ const requiredProps = {
+ path: GREEN_BOX_IMAGE_URL,
+ renderInfo: true,
+ };
+ let wrapper;
+ let imageInfo;
+
+ function createElement({ props, includeRequired = true } = {}) {
+ const data = includeRequired ? { ...requiredProps, ...props } : { ...props };
+
+ wrapper = shallowMount(ImageViewer, {
+ propsData: data,
+ });
+ imageInfo = wrapper.find('.image-info');
+ }
+
+ describe('file sizes', () => {
+ it('should show the humanized file size when `renderInfo` is true and there is size info', () => {
+ createElement({ props: { fileSize: 1024 } });
+
+ expect(imageInfo.text()).toContain('1.00 KiB');
+ });
+
+ it('should not show the humanized file size when `renderInfo` is true and there is no size', () => {
+ const FILESIZE_RE = /\d+(\.\d+)?\s*([KMGTP]i)*B/;
+
+ createElement({ props: { fileSize: 0 } });
+
+ // It shouldn't show any filesize info
+ expect(imageInfo.text()).not.toMatch(FILESIZE_RE);
+ });
+
+ it('should not show any image information when `renderInfo` is false', () => {
+ createElement({ props: { renderInfo: false } });
+
+ expect(imageInfo.exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
index d1de98f4a15..9e6b5286899 100644
--- a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
@@ -1,114 +1,129 @@
-import Vue from 'vue';
-
+import { shallowMount } from '@vue/test-utils';
+import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import IssueAssignees from '~/vue_shared/components/issue/issue_assignees.vue';
-
-import mountComponent from 'helpers/vue_mount_component_helper';
import { mockAssigneesList } from '../../../../javascripts/boards/mock_data';
-const createComponent = (assignees = mockAssigneesList, cssClass = '') => {
- const Component = Vue.extend(IssueAssignees);
-
- return mountComponent(Component, {
- assignees,
- cssClass,
- });
-};
+const TEST_CSS_CLASSES = 'test-classes';
+const TEST_MAX_VISIBLE = 4;
+const TEST_ICON_SIZE = 16;
describe('IssueAssigneesComponent', () => {
+ let wrapper;
let vm;
- beforeEach(() => {
- vm = createComponent();
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('data', () => {
- it('returns default data props', () => {
- expect(vm.maxVisibleAssignees).toBe(2);
- expect(vm.maxAssigneeAvatars).toBe(3);
- expect(vm.maxAssignees).toBe(99);
+ const factory = props => {
+ wrapper = shallowMount(IssueAssignees, {
+ propsData: {
+ assignees: mockAssigneesList,
+ ...props,
+ },
+ sync: false,
});
+ vm = wrapper.vm; // eslint-disable-line
+ };
+
+ const findTooltipText = () => wrapper.find('.js-assignee-tooltip').text();
+ const findAvatars = () => wrapper.findAll(UserAvatarLink);
+ const findOverflowCounter = () => wrapper.find('.avatar-counter');
+
+ it('returns default data props', () => {
+ factory({ assignees: mockAssigneesList });
+ expect(vm.iconSize).toBe(24);
+ expect(vm.maxVisible).toBe(3);
+ expect(vm.maxAssignees).toBe(99);
});
- describe('computed', () => {
- describe('countOverLimit', () => {
- it('should return difference between assignees count and maxVisibleAssignees', () => {
- expect(vm.countOverLimit).toBe(mockAssigneesList.length - vm.maxVisibleAssignees);
- });
- });
-
- describe('assigneesToShow', () => {
- it('should return assignees containing only 2 items when count more than maxAssigneeAvatars', () => {
- expect(vm.assigneesToShow.length).toBe(2);
- });
-
- it('should return all assignees as it is when count less than maxAssigneeAvatars', () => {
- vm.assignees = mockAssigneesList.slice(0, 3); // Set 3 Assignees
-
- expect(vm.assigneesToShow.length).toBe(3);
- });
- });
-
- describe('assigneesCounterTooltip', () => {
- it('should return string containing count of remaining assignees when count more than maxAssigneeAvatars', () => {
- expect(vm.assigneesCounterTooltip).toBe('3 more assignees');
- });
- });
-
- describe('shouldRenderAssigneesCounter', () => {
- it('should return `false` when assignees count less than maxAssigneeAvatars', () => {
- vm.assignees = mockAssigneesList.slice(0, 3); // Set 3 Assignees
-
- expect(vm.shouldRenderAssigneesCounter).toBe(false);
- });
-
- it('should return `true` when assignees count more than maxAssigneeAvatars', () => {
- expect(vm.shouldRenderAssigneesCounter).toBe(true);
+ describe.each`
+ numAssignees | maxVisible | expectedShown | expectedHidden
+ ${0} | ${3} | ${0} | ${''}
+ ${1} | ${3} | ${1} | ${''}
+ ${2} | ${3} | ${2} | ${''}
+ ${3} | ${3} | ${3} | ${''}
+ ${4} | ${3} | ${2} | ${'+2'}
+ ${5} | ${2} | ${1} | ${'+4'}
+ ${1000} | ${5} | ${4} | ${'99+'}
+ `(
+ 'with assignees ($numAssignees) and maxVisible ($maxVisible)',
+ ({ numAssignees, maxVisible, expectedShown, expectedHidden }) => {
+ beforeEach(() => {
+ factory({ assignees: Array(numAssignees).fill({}), maxVisible });
});
- });
- describe('assigneeCounterLabel', () => {
- it('should return count of additional assignees total assignees count more than maxAssigneeAvatars', () => {
- expect(vm.assigneeCounterLabel).toBe('+3');
+ if (expectedShown) {
+ it('shows assignee avatars', () => {
+ expect(findAvatars().length).toEqual(expectedShown);
+ });
+ } else {
+ it('does not show assignee avatars', () => {
+ expect(findAvatars().length).toEqual(0);
+ });
+ }
+
+ if (expectedHidden) {
+ it('shows overflow counter', () => {
+ const hiddenCount = numAssignees - expectedShown;
+
+ expect(findOverflowCounter().exists()).toBe(true);
+ expect(findOverflowCounter().text()).toEqual(expectedHidden.toString());
+ expect(findOverflowCounter().attributes('data-original-title')).toEqual(
+ `${hiddenCount} more assignees`,
+ );
+ });
+ } else {
+ it('does not show overflow counter', () => {
+ expect(findOverflowCounter().exists()).toBe(false);
+ });
+ }
+ },
+ );
+
+ describe('when mounted', () => {
+ beforeEach(() => {
+ factory({
+ imgCssClasses: TEST_CSS_CLASSES,
+ maxVisible: TEST_MAX_VISIBLE,
+ iconSize: TEST_ICON_SIZE,
});
});
- });
- describe('methods', () => {
- describe('avatarUrlTitle', () => {
- it('returns string containing alt text for assignee avatar', () => {
- expect(vm.avatarUrlTitle(mockAssigneesList[0])).toBe('Avatar for Terrell Graham');
- });
+ it('computes alt text for assignee avatar', () => {
+ expect(vm.avatarUrlTitle(mockAssigneesList[0])).toBe('Avatar for Terrell Graham');
});
- });
- describe('template', () => {
it('renders component root element with class `issue-assignees`', () => {
- expect(vm.$el.classList.contains('issue-assignees')).toBe(true);
+ expect(wrapper.element.classList.contains('issue-assignees')).toBe(true);
});
- it('renders assignee avatars', () => {
- expect(vm.$el.querySelectorAll('.user-avatar-link').length).toBe(2);
+ it('renders assignee', () => {
+ const data = findAvatars().wrappers.map(x => ({
+ ...x.props(),
+ }));
+
+ const expected = mockAssigneesList.slice(0, TEST_MAX_VISIBLE - 1).map(x =>
+ expect.objectContaining({
+ linkHref: x.web_url,
+ imgAlt: `Avatar for ${x.name}`,
+ imgCssClasses: TEST_CSS_CLASSES,
+ imgSrc: x.avatar_url,
+ imgSize: TEST_ICON_SIZE,
+ }),
+ );
+
+ expect(data).toEqual(expected);
});
- it('renders assignee tooltips', () => {
- const tooltipText = vm.$el
- .querySelectorAll('.user-avatar-link')[0]
- .querySelector('.js-assignee-tooltip').innerText;
-
- expect(tooltipText).toContain('Assignee');
- expect(tooltipText).toContain('Terrell Graham');
- expect(tooltipText).toContain('@monserrate.gleichner');
- });
+ describe('assignee tooltips', () => {
+ it('renders "Assignee" header', () => {
+ expect(findTooltipText()).toContain('Assignee');
+ });
- it('renders additional assignees count', () => {
- const avatarCounterEl = vm.$el.querySelector('.avatar-counter');
+ it('renders assignee name', () => {
+ expect(findTooltipText()).toContain('Terrell Graham');
+ });
- expect(avatarCounterEl.innerText.trim()).toBe('+3');
- expect(avatarCounterEl.getAttribute('data-original-title')).toBe('3 more assignees');
+ it('renders assignee @username', () => {
+ expect(findTooltipText()).toContain('@monserrate.gleichner');
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
index eafff7f681e..45f131194ca 100644
--- a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import issuePlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
import createStore from '~/notes/stores';
-import { userDataMock } from '../../../../javascripts/notes/mock_data';
+import { userDataMock } from '../../../notes/mock_data';
describe('issue placeholder system note component', () => {
let store;
diff --git a/spec/frontend/vue_shared/components/notes/system_note_spec.js b/spec/frontend/vue_shared/components/notes/system_note_spec.js
index a65e3eb294a..c2e8359f78d 100644
--- a/spec/frontend/vue_shared/components/notes/system_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/system_note_spec.js
@@ -57,7 +57,7 @@ describe('system note component', () => {
// we need to strip them because they break layout of commit lists in system notes:
// https://gitlab.com/gitlab-org/gitlab-foss/uploads/b07a10670919254f0220d3ff5c1aa110/jqzI.png
it('removes wrapping paragraph from note HTML', () => {
- expect(vm.$el.querySelector('.system-note-message').innerHTML).toEqual('<span>closed</span>');
+ expect(vm.$el.querySelector('.system-note-message').innerHTML).toContain('<span>closed</span>');
});
it('should initMRPopovers onMount', () => {
diff --git a/spec/frontend/vue_shared/components/slot_switch_spec.js b/spec/frontend/vue_shared/components/slot_switch_spec.js
new file mode 100644
index 00000000000..cff955c05b2
--- /dev/null
+++ b/spec/frontend/vue_shared/components/slot_switch_spec.js
@@ -0,0 +1,56 @@
+import { shallowMount } from '@vue/test-utils';
+
+import SlotSwitch from '~/vue_shared/components/slot_switch';
+
+describe('SlotSwitch', () => {
+ const slots = {
+ first: '<a>AGP</a>',
+ second: '<p>PCI</p>',
+ };
+
+ let wrapper;
+
+ const createComponent = propsData => {
+ wrapper = shallowMount(SlotSwitch, {
+ propsData,
+ slots,
+ sync: false,
+ });
+ };
+
+ const getChildrenHtml = () => wrapper.findAll('* *').wrappers.map(c => c.html());
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ it('throws an error if activeSlotNames is missing', () => {
+ expect(createComponent).toThrow('[Vue warn]: Missing required prop: "activeSlotNames"');
+ });
+
+ it('renders no slots if activeSlotNames is empty', () => {
+ createComponent({
+ activeSlotNames: [],
+ });
+
+ expect(getChildrenHtml().length).toBe(0);
+ });
+
+ it('renders one slot if activeSlotNames contains single slot name', () => {
+ createComponent({
+ activeSlotNames: ['first'],
+ });
+
+ expect(getChildrenHtml()).toEqual([slots.first]);
+ });
+
+ it('renders multiple slots if activeSlotNames contains multiple slot names', () => {
+ createComponent({
+ activeSlotNames: Object.keys(slots),
+ });
+
+ expect(getChildrenHtml()).toEqual(Object.values(slots));
+ });
+});
diff --git a/spec/frontend/vue_shared/components/split_button_spec.js b/spec/frontend/vue_shared/components/split_button_spec.js
new file mode 100644
index 00000000000..520abb02cf7
--- /dev/null
+++ b/spec/frontend/vue_shared/components/split_button_spec.js
@@ -0,0 +1,104 @@
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+
+import SplitButton from '~/vue_shared/components/split_button.vue';
+
+const mockActionItems = [
+ {
+ eventName: 'concert',
+ title: 'professor',
+ description: 'very symphonic',
+ },
+ {
+ eventName: 'apocalypse',
+ title: 'captain',
+ description: 'warp drive',
+ },
+];
+
+describe('SplitButton', () => {
+ let wrapper;
+
+ const createComponent = propsData => {
+ wrapper = shallowMount(SplitButton, {
+ propsData,
+ sync: false,
+ });
+ };
+
+ const findDropdown = () => wrapper.find(GlDropdown);
+ const findDropdownItem = (index = 0) =>
+ findDropdown()
+ .findAll(GlDropdownItem)
+ .at(index);
+ const selectItem = index => {
+ findDropdownItem(index).vm.$emit('click');
+
+ return wrapper.vm.$nextTick();
+ };
+ const clickToggleButton = () => {
+ findDropdown().vm.$emit('click');
+
+ return wrapper.vm.$nextTick();
+ };
+
+ it('fails for empty actionItems', () => {
+ const actionItems = [];
+ expect(() => createComponent({ actionItems })).toThrow();
+ });
+
+ it('fails for single actionItems', () => {
+ const actionItems = [mockActionItems[0]];
+ expect(() => createComponent({ actionItems })).toThrow();
+ });
+
+ it('renders actionItems', () => {
+ createComponent({ actionItems: mockActionItems });
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('toggle button text', () => {
+ beforeEach(() => {
+ createComponent({ actionItems: mockActionItems });
+ });
+
+ it('defaults to first actionItems title', () => {
+ expect(findDropdown().props().text).toBe(mockActionItems[0].title);
+ });
+
+ it('changes to selected actionItems title', () =>
+ selectItem(1).then(() => {
+ expect(findDropdown().props().text).toBe(mockActionItems[1].title);
+ }));
+ });
+
+ describe('emitted event', () => {
+ let eventHandler;
+
+ beforeEach(() => {
+ createComponent({ actionItems: mockActionItems });
+ });
+
+ const addEventHandler = ({ eventName }) => {
+ eventHandler = jest.fn();
+ wrapper.vm.$once(eventName, () => eventHandler());
+ };
+
+ it('defaults to first actionItems event', () => {
+ addEventHandler(mockActionItems[0]);
+
+ return clickToggleButton().then(() => {
+ expect(eventHandler).toHaveBeenCalled();
+ });
+ });
+
+ it('changes to selected actionItems event', () =>
+ selectItem(1)
+ .then(() => addEventHandler(mockActionItems[1]))
+ .then(clickToggleButton)
+ .then(() => {
+ expect(eventHandler).toHaveBeenCalled();
+ }));
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/table_pagination_spec.js b/spec/frontend/vue_shared/components/table_pagination_spec.js
index 258530f32f7..0a9ff36b2fb 100644
--- a/spec/javascripts/vue_shared/components/table_pagination_spec.js
+++ b/spec/frontend/vue_shared/components/table_pagination_spec.js
@@ -1,26 +1,37 @@
-import Vue from 'vue';
-import paginationComp from '~/vue_shared/components/pagination/table_pagination.vue';
+import { shallowMount } from '@vue/test-utils';
+import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
describe('Pagination component', () => {
- let component;
- let PaginationComponent;
+ let wrapper;
let spy;
- let mountComponent;
+
+ const mountComponent = props => {
+ wrapper = shallowMount(TablePagination, {
+ sync: false,
+ propsData: props,
+ });
+ };
+
+ const findFirstButtonLink = () => wrapper.find('.js-first-button .page-link');
+ const findPreviousButton = () => wrapper.find('.js-previous-button');
+ const findPreviousButtonLink = () => wrapper.find('.js-previous-button .page-link');
+ const findNextButton = () => wrapper.find('.js-next-button');
+ const findNextButtonLink = () => wrapper.find('.js-next-button .page-link');
+ const findLastButtonLink = () => wrapper.find('.js-last-button .page-link');
+ const findPages = () => wrapper.findAll('.page');
+ const findSeparator = () => wrapper.find('.separator');
beforeEach(() => {
- spy = jasmine.createSpy('spy');
- PaginationComponent = Vue.extend(paginationComp);
+ spy = jest.fn();
+ });
- mountComponent = function(props) {
- return new PaginationComponent({
- propsData: props,
- }).$mount();
- };
+ afterEach(() => {
+ wrapper.destroy();
});
describe('render', () => {
it('should not render anything', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: NaN,
page: 1,
@@ -32,12 +43,12 @@ describe('Pagination component', () => {
change: spy,
});
- expect(component.$el.childNodes.length).toEqual(0);
+ expect(wrapper.isEmpty()).toBe(true);
});
describe('prev button', () => {
it('should be disabled and non clickable', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 2,
page: 1,
@@ -49,17 +60,13 @@ describe('Pagination component', () => {
change: spy,
});
- expect(
- component.$el.querySelector('.js-previous-button').classList.contains('disabled'),
- ).toEqual(true);
-
- component.$el.querySelector('.js-previous-button .page-link').click();
-
+ expect(findPreviousButton().classes()).toContain('disabled');
+ findPreviousButtonLink().trigger('click');
expect(spy).not.toHaveBeenCalled();
});
it('should be disabled and non clickable when total and totalPages are NaN', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 2,
page: 1,
@@ -70,18 +77,13 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- expect(
- component.$el.querySelector('.js-previous-button').classList.contains('disabled'),
- ).toEqual(true);
-
- component.$el.querySelector('.js-previous-button .page-link').click();
-
+ expect(findPreviousButton().classes()).toContain('disabled');
+ findPreviousButtonLink().trigger('click');
expect(spy).not.toHaveBeenCalled();
});
it('should be enabled and clickable', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 3,
page: 2,
@@ -92,14 +94,12 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- component.$el.querySelector('.js-previous-button .page-link').click();
-
+ findPreviousButtonLink().trigger('click');
expect(spy).toHaveBeenCalledWith(1);
});
it('should be enabled and clickable when total and totalPages are NaN', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 3,
page: 2,
@@ -110,16 +110,14 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- component.$el.querySelector('.js-previous-button .page-link').click();
-
+ findPreviousButtonLink().trigger('click');
expect(spy).toHaveBeenCalledWith(1);
});
});
describe('first button', () => {
it('should call the change callback with the first page', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 3,
page: 2,
@@ -130,18 +128,14 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- const button = component.$el.querySelector('.js-first-button .page-link');
-
- expect(button.textContent.trim()).toEqual('« First');
-
- button.click();
-
+ const button = findFirstButtonLink();
+ expect(button.text().trim()).toEqual('« First');
+ button.trigger('click');
expect(spy).toHaveBeenCalledWith(1);
});
it('should call the change callback with the first page when total and totalPages are NaN', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 3,
page: 2,
@@ -152,20 +146,16 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- const button = component.$el.querySelector('.js-first-button .page-link');
-
- expect(button.textContent.trim()).toEqual('« First');
-
- button.click();
-
+ const button = findFirstButtonLink();
+ expect(button.text().trim()).toEqual('« First');
+ button.trigger('click');
expect(spy).toHaveBeenCalledWith(1);
});
});
describe('last button', () => {
it('should call the change callback with the last page', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 3,
page: 2,
@@ -176,18 +166,14 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- const button = component.$el.querySelector('.js-last-button .page-link');
-
- expect(button.textContent.trim()).toEqual('Last »');
-
- button.click();
-
+ const button = findLastButtonLink();
+ expect(button.text().trim()).toEqual('Last »');
+ button.trigger('click');
expect(spy).toHaveBeenCalledWith(5);
});
it('should not render', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 3,
page: 2,
@@ -198,14 +184,13 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- expect(component.$el.querySelector('.js-last-button .page-link')).toBeNull();
+ expect(findLastButtonLink().exists()).toBe(false);
});
});
describe('next button', () => {
it('should be disabled and non clickable', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: NaN,
page: 5,
@@ -216,16 +201,17 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- expect(component.$el.querySelector('.js-next-button').textContent.trim()).toEqual('Next ›');
-
- component.$el.querySelector('.js-next-button .page-link').click();
-
+ expect(
+ findNextButton()
+ .text()
+ .trim(),
+ ).toEqual('Next ›');
+ findNextButtonLink().trigger('click');
expect(spy).not.toHaveBeenCalled();
});
it('should be disabled and non clickable when total and totalPages are NaN', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: NaN,
page: 5,
@@ -236,16 +222,17 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- expect(component.$el.querySelector('.js-next-button').textContent.trim()).toEqual('Next ›');
-
- component.$el.querySelector('.js-next-button .page-link').click();
-
+ expect(
+ findNextButton()
+ .text()
+ .trim(),
+ ).toEqual('Next ›');
+ findNextButtonLink().trigger('click');
expect(spy).not.toHaveBeenCalled();
});
it('should be enabled and clickable', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 4,
page: 3,
@@ -256,14 +243,12 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- component.$el.querySelector('.js-next-button .page-link').click();
-
+ findNextButtonLink().trigger('click');
expect(spy).toHaveBeenCalledWith(4);
});
it('should be enabled and clickable when total and totalPages are NaN', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 4,
page: 3,
@@ -274,16 +259,14 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- component.$el.querySelector('.js-next-button .page-link').click();
-
+ findNextButtonLink().trigger('click');
expect(spy).toHaveBeenCalledWith(4);
});
});
describe('numbered buttons', () => {
it('should render 5 pages', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 4,
page: 3,
@@ -294,12 +277,11 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- expect(component.$el.querySelectorAll('.page').length).toEqual(5);
+ expect(findPages().length).toEqual(5);
});
it('should not render any page', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 4,
page: 3,
@@ -310,14 +292,13 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- expect(component.$el.querySelectorAll('.page').length).toEqual(0);
+ expect(findPages().length).toEqual(0);
});
});
describe('spread operator', () => {
it('should render', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 4,
page: 3,
@@ -328,12 +309,15 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- expect(component.$el.querySelector('.separator').textContent.trim()).toEqual('...');
+ expect(
+ findSeparator()
+ .text()
+ .trim(),
+ ).toEqual('...');
});
it('should not render', () => {
- component = mountComponent({
+ mountComponent({
pageInfo: {
nextPage: 4,
page: 3,
@@ -344,8 +328,7 @@ describe('Pagination component', () => {
},
change: spy,
});
-
- expect(component.$el.querySelector('.separator')).toBeNull();
+ expect(findSeparator().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
new file mode 100644
index 00000000000..2f87359a4a6
--- /dev/null
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
@@ -0,0 +1,108 @@
+import { shallowMount } from '@vue/test-utils';
+import { placeholderImage } from '~/lazy_loader';
+import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
+import defaultAvatarUrl from 'images/no_avatar.png';
+
+jest.mock('images/no_avatar.png', () => 'default-avatar-url');
+
+const DEFAULT_PROPS = {
+ size: 99,
+ imgSrc: 'myavatarurl.com',
+ imgAlt: 'mydisplayname',
+ cssClasses: 'myextraavatarclass',
+ tooltipText: 'tooltip text',
+ tooltipPlacement: 'bottom',
+};
+
+describe('User Avatar Image Component', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Initialization', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ },
+ sync: false,
+ });
+ });
+
+ it('should have <img> as a child element', () => {
+ const imageElement = wrapper.find('img');
+
+ expect(imageElement.exists()).toBe(true);
+ expect(imageElement.attributes('src')).toBe(`${DEFAULT_PROPS.imgSrc}?width=99`);
+ expect(imageElement.attributes('data-src')).toBe(`${DEFAULT_PROPS.imgSrc}?width=99`);
+ expect(imageElement.attributes('alt')).toBe(DEFAULT_PROPS.imgAlt);
+ });
+
+ it('should properly render img css', () => {
+ const classes = wrapper.find('img').classes();
+ expect(classes).toEqual(expect.arrayContaining(['avatar', 's99', DEFAULT_PROPS.cssClasses]));
+ expect(classes).not.toContain('lazy');
+ });
+ });
+
+ describe('Initialization when lazy', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ lazy: true,
+ },
+ sync: false,
+ });
+ });
+
+ it('should add lazy attributes', () => {
+ const imageElement = wrapper.find('img');
+
+ expect(imageElement.classes()).toContain('lazy');
+ expect(imageElement.attributes('src')).toBe(placeholderImage);
+ expect(imageElement.attributes('data-src')).toBe(`${DEFAULT_PROPS.imgSrc}?width=99`);
+ });
+ });
+
+ describe('Initialization without src', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, { sync: false });
+ });
+
+ it('should have default avatar image', () => {
+ const imageElement = wrapper.find('img');
+
+ expect(imageElement.attributes('src')).toBe(`${defaultAvatarUrl}?width=20`);
+ });
+ });
+
+ describe('dynamic tooltip content', () => {
+ const props = DEFAULT_PROPS;
+ const slots = {
+ default: ['Action!'],
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, { propsData: { props }, slots, sync: false });
+ });
+
+ it('renders the tooltip slot', () => {
+ expect(wrapper.find('.js-user-avatar-image-toolip').exists()).toBe(true);
+ });
+
+ it('renders the tooltip content', () => {
+ expect(wrapper.find('.js-user-avatar-image-toolip').text()).toContain(slots.default[0]);
+ });
+
+ it('does not render tooltip data attributes for on avatar image', () => {
+ const avatarImg = wrapper.find('img');
+
+ expect(avatarImg.attributes('data-original-title')).toBeFalsy();
+ expect(avatarImg.attributes('data-placement')).not.toBeDefined();
+ expect(avatarImg.attributes('data-container')).not.toBeDefined();
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
new file mode 100644
index 00000000000..fc2eb6329b0
--- /dev/null
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -0,0 +1,186 @@
+import UserPopover from '~/vue_shared/components/user_popover/user_popover.vue';
+import { mount } from '@vue/test-utils';
+
+const DEFAULT_PROPS = {
+ loaded: true,
+ user: {
+ username: 'root',
+ name: 'Administrator',
+ location: 'Vienna',
+ bio: null,
+ organization: null,
+ status: null,
+ },
+};
+
+describe('User Popover Component', () => {
+ const fixtureTemplate = 'merge_requests/diff_comment.html';
+ preloadFixtures(fixtureTemplate);
+
+ let wrapper;
+
+ beforeEach(() => {
+ loadFixtures(fixtureTemplate);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Empty', () => {
+ beforeEach(() => {
+ wrapper = mount(UserPopover, {
+ propsData: {
+ target: document.querySelector('.js-user-link'),
+ user: {
+ name: null,
+ username: null,
+ location: null,
+ bio: null,
+ organization: null,
+ status: null,
+ },
+ },
+ sync: false,
+ });
+ });
+
+ it('should return skeleton loaders', () => {
+ expect(wrapper.findAll('.animation-container').length).toBe(4);
+ });
+ });
+
+ describe('basic data', () => {
+ it('should show basic fields', () => {
+ wrapper = mount(UserPopover, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ target: document.querySelector('.js-user-link'),
+ },
+ sync: false,
+ });
+
+ expect(wrapper.text()).toContain(DEFAULT_PROPS.user.name);
+ expect(wrapper.text()).toContain(DEFAULT_PROPS.user.username);
+ expect(wrapper.text()).toContain(DEFAULT_PROPS.user.location);
+ });
+
+ it('shows icon for location', () => {
+ const iconEl = wrapper.find('.js-location svg');
+
+ expect(iconEl.find('use').element.getAttribute('xlink:href')).toContain('location');
+ });
+ });
+
+ describe('job data', () => {
+ it('should show only bio if no organization is available', () => {
+ const testProps = Object.assign({}, DEFAULT_PROPS);
+ testProps.user.bio = 'Engineer';
+
+ wrapper = mount(UserPopover, {
+ propsData: {
+ ...testProps,
+ target: document.querySelector('.js-user-link'),
+ },
+ sync: false,
+ });
+
+ expect(wrapper.text()).toContain('Engineer');
+ });
+
+ it('should show only organization if no bio is available', () => {
+ const testProps = Object.assign({}, DEFAULT_PROPS);
+ testProps.user.organization = 'GitLab';
+
+ wrapper = mount(UserPopover, {
+ propsData: {
+ ...testProps,
+ target: document.querySelector('.js-user-link'),
+ },
+ sync: false,
+ });
+
+ expect(wrapper.text()).toContain('GitLab');
+ });
+
+ it('should display bio and organization in separate lines', () => {
+ const testProps = Object.assign({}, DEFAULT_PROPS);
+ testProps.user.bio = 'Engineer';
+ testProps.user.organization = 'GitLab';
+
+ wrapper = mount(UserPopover, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ target: document.querySelector('.js-user-link'),
+ },
+ sync: false,
+ });
+
+ expect(wrapper.find('.js-bio').text()).toContain('Engineer');
+ expect(wrapper.find('.js-organization').text()).toContain('GitLab');
+ });
+
+ it('should not encode special characters in bio and organization', () => {
+ const testProps = Object.assign({}, DEFAULT_PROPS);
+ testProps.user.bio = 'Manager & Team Lead';
+ testProps.user.organization = 'Me & my <funky> Company';
+
+ wrapper = mount(UserPopover, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ target: document.querySelector('.js-user-link'),
+ },
+ sync: false,
+ });
+
+ expect(wrapper.find('.js-bio').text()).toContain('Manager & Team Lead');
+ expect(wrapper.find('.js-organization').text()).toContain('Me & my <funky> Company');
+ });
+
+ it('shows icon for bio', () => {
+ const iconEl = wrapper.find('.js-bio svg');
+
+ expect(iconEl.find('use').element.getAttribute('xlink:href')).toContain('profile');
+ });
+
+ it('shows icon for organization', () => {
+ const iconEl = wrapper.find('.js-organization svg');
+
+ expect(iconEl.find('use').element.getAttribute('xlink:href')).toContain('work');
+ });
+ });
+
+ describe('status data', () => {
+ it('should show only message', () => {
+ const testProps = Object.assign({}, DEFAULT_PROPS);
+ testProps.user.status = { message_html: 'Hello World' };
+
+ wrapper = mount(UserPopover, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ target: document.querySelector('.js-user-link'),
+ },
+ sync: false,
+ });
+
+ expect(wrapper.text()).toContain('Hello World');
+ });
+
+ it('should show message and emoji', () => {
+ const testProps = Object.assign({}, DEFAULT_PROPS);
+ testProps.user.status = { emoji: 'basketball_player', message_html: 'Hello World' };
+
+ wrapper = mount(UserPopover, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ target: document.querySelector('.js-user-link'),
+ status: { emoji: 'basketball_player', message_html: 'Hello World' },
+ },
+ sync: false,
+ });
+
+ expect(wrapper.text()).toContain('Hello World');
+ expect(wrapper.html()).toContain('<gl-emoji data-name="basketball_player"');
+ });
+ });
+});
diff --git a/spec/graphql/features/authorization_spec.rb b/spec/graphql/features/authorization_spec.rb
index 9a60ff3b78c..7ad6a622b4b 100644
--- a/spec/graphql/features/authorization_spec.rb
+++ b/spec/graphql/features/authorization_spec.rb
@@ -259,7 +259,8 @@ describe 'Gitlab::Graphql::Authorization' do
let(:project_type) do |type|
type_factory do |type|
type.graphql_name 'FakeProjectType'
- type.field :test_issues, issue_type.connection_type, null: false, resolve: -> (_, _, _) { Issue.where(project: [visible_project, other_project]) }
+ type.field :test_issues, issue_type.connection_type, null: false,
+ resolve: -> (_, _, _) { Issue.where(project: [visible_project, other_project]).order(id: :asc) }
end
end
let(:query_type) do
diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb
index 0a27bbecfef..dcf3c989047 100644
--- a/spec/graphql/gitlab_schema_spec.rb
+++ b/spec/graphql/gitlab_schema_spec.rb
@@ -36,7 +36,7 @@ describe GitlabSchema do
it 'paginates active record relations using `Gitlab::Graphql::Connections::KeysetConnection`' do
connection = GraphQL::Relay::BaseConnection::CONNECTION_IMPLEMENTATIONS[ActiveRecord::Relation.name]
- expect(connection).to eq(Gitlab::Graphql::Connections::KeysetConnection)
+ expect(connection).to eq(Gitlab::Graphql::Connections::Keyset::Connection)
end
describe '.execute' do
diff --git a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
new file mode 100644
index 00000000000..e8da0e25b7d
--- /dev/null
+++ b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Mutations::MergeRequests::SetAssignees do
+ let(:merge_request) { create(:merge_request) }
+ let(:user) { create(:user) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+
+ describe '#resolve' do
+ let(:assignee) { create(:user) }
+ let(:assignee2) { create(:user) }
+ let(:assignee_usernames) { [assignee.username] }
+ let(:mutated_merge_request) { subject[:merge_request] }
+ subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, assignee_usernames: assignee_usernames) }
+
+ before do
+ merge_request.project.add_developer(assignee)
+ merge_request.project.add_developer(assignee2)
+ end
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when the user can update the merge request' do
+ before do
+ merge_request.project.add_developer(user)
+ end
+
+ it 'replaces the assignee' do
+ merge_request.assignees = [assignee2]
+ merge_request.save!
+
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.assignees).to contain_exactly(assignee)
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'returns errors merge request could not be updated' do
+ # Make the merge request invalid
+ merge_request.allow_broken = true
+ merge_request.update!(source_project: nil)
+
+ expect(subject[:errors]).not_to be_empty
+ end
+
+ context 'when passing an empty assignee list' do
+ let(:assignee_usernames) { [] }
+
+ before do
+ merge_request.assignees = [assignee]
+ merge_request.save!
+ end
+
+ it 'removes all assignees' do
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.assignees).to eq([])
+ expect(subject[:errors]).to be_empty
+ end
+ end
+
+ context 'when passing "append" as true' do
+ subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, assignee_usernames: assignee_usernames, operation_mode: Types::MutationOperationModeEnum.enum[:append]) }
+
+ before do
+ merge_request.assignees = [assignee2]
+ merge_request.save!
+
+ # In CE, APPEND is a NOOP as you can't have multiple assignees
+ # We test multiple assignment in EE specs
+ stub_licensed_features(multiple_merge_request_assignees: false)
+ end
+
+ it 'is a NO-OP in FOSS' do
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.assignees).to contain_exactly(assignee2)
+ expect(subject[:errors]).to be_empty
+ end
+ end
+
+ context 'when passing "remove" as true' do
+ before do
+ merge_request.assignees = [assignee]
+ merge_request.save!
+ end
+
+ it 'removes named assignee' do
+ mutated_merge_request = mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, assignee_usernames: assignee_usernames, operation_mode: Types::MutationOperationModeEnum.enum[:remove])[:merge_request]
+
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.assignees).to eq([])
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'does not remove unnamed assignee' do
+ mutated_merge_request = mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, assignee_usernames: [assignee2.username], operation_mode: Types::MutationOperationModeEnum.enum[:remove])[:merge_request]
+
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.assignees).to contain_exactly(assignee)
+ expect(subject[:errors]).to be_empty
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/merge_requests/set_labels_spec.rb b/spec/graphql/mutations/merge_requests/set_labels_spec.rb
new file mode 100644
index 00000000000..3729251bab7
--- /dev/null
+++ b/spec/graphql/mutations/merge_requests/set_labels_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Mutations::MergeRequests::SetLabels do
+ let(:merge_request) { create(:merge_request) }
+ let(:user) { create(:user) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+
+ describe '#resolve' do
+ let(:label) { create(:label, project: merge_request.project) }
+ let(:label2) { create(:label, project: merge_request.project) }
+ let(:label_ids) { [label.to_global_id] }
+ let(:mutated_merge_request) { subject[:merge_request] }
+ subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, label_ids: label_ids) }
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when the user can update the merge request' do
+ before do
+ merge_request.project.add_developer(user)
+ end
+
+ it 'sets the labels, removing all others' do
+ merge_request.update!(labels: [label2])
+
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.labels).to contain_exactly(label)
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'returns errors merge request could not be updated' do
+ # Make the merge request invalid
+ merge_request.allow_broken = true
+ merge_request.update!(source_project: nil)
+
+ expect(subject[:errors]).not_to be_empty
+ end
+
+ context 'when passing an empty array' do
+ let(:label_ids) { [] }
+
+ it 'removes all labels' do
+ merge_request.update!(labels: [label])
+
+ expect(mutated_merge_request.labels).to be_empty
+ end
+ end
+
+ context 'when passing operation_mode as APPEND' do
+ subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, label_ids: label_ids, operation_mode: Types::MutationOperationModeEnum.enum[:append]) }
+
+ it 'sets the labels, without removing others' do
+ merge_request.update!(labels: [label2])
+
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.labels).to contain_exactly(label, label2)
+ expect(subject[:errors]).to be_empty
+ end
+ end
+
+ context 'when passing operation_mode as REMOVE' do
+ subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, label_ids: label_ids, operation_mode: Types::MutationOperationModeEnum.enum[:remove])}
+
+ it 'removes the labels, without removing others' do
+ merge_request.update!(labels: [label, label2])
+
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.labels).to contain_exactly(label2)
+ expect(subject[:errors]).to be_empty
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/merge_requests/set_locked_spec.rb b/spec/graphql/mutations/merge_requests/set_locked_spec.rb
new file mode 100644
index 00000000000..51249854378
--- /dev/null
+++ b/spec/graphql/mutations/merge_requests/set_locked_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Mutations::MergeRequests::SetLocked do
+ let(:merge_request) { create(:merge_request) }
+ let(:user) { create(:user) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+
+ describe '#resolve' do
+ let(:locked) { true }
+ let(:mutated_merge_request) { subject[:merge_request] }
+ subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, locked: locked) }
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when the user can update the merge request' do
+ before do
+ merge_request.project.add_developer(user)
+ end
+
+ it 'returns the merge request as discussion locked' do
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request).to be_discussion_locked
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'returns errors when merge request could not be updated' do
+ # Make the merge request invalid
+ merge_request.allow_broken = true
+ merge_request.update!(source_project: nil)
+
+ expect(subject[:errors]).not_to be_empty
+ end
+
+ context 'when passing locked as false' do
+ let(:locked) { false }
+
+ it 'unlocks the discussion' do
+ merge_request.update(discussion_locked: true)
+
+ expect(mutated_merge_request).not_to be_discussion_locked
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/merge_requests/set_milestone_spec.rb b/spec/graphql/mutations/merge_requests/set_milestone_spec.rb
new file mode 100644
index 00000000000..c2792a4bc25
--- /dev/null
+++ b/spec/graphql/mutations/merge_requests/set_milestone_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Mutations::MergeRequests::SetMilestone do
+ let(:merge_request) { create(:merge_request) }
+ let(:user) { create(:user) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+
+ describe '#resolve' do
+ let(:milestone) { create(:milestone, project: merge_request.project) }
+ let(:mutated_merge_request) { subject[:merge_request] }
+ subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, milestone: milestone) }
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when the user can update the merge request' do
+ before do
+ merge_request.project.add_developer(user)
+ end
+
+ it 'returns the merge request with the milestone' do
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.milestone).to eq(milestone)
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'returns errors when merge request could not be updated' do
+ # Make the merge request invalid
+ merge_request.allow_broken = true
+ merge_request.update!(source_project: nil)
+
+ expect(subject[:errors]).not_to be_empty
+ end
+
+ context 'when passing milestone_id as nil' do
+ let(:milestone) { nil }
+
+ it 'removes the milestone' do
+ merge_request.update!(milestone: create(:milestone, project: merge_request.project))
+
+ expect(mutated_merge_request.milestone).to eq(nil)
+ end
+
+ it 'does not do anything if the MR already does not have a milestone' do
+ expect(mutated_merge_request.milestone).to eq(nil)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/merge_requests/set_subscription_spec.rb b/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
new file mode 100644
index 00000000000..116a77abcc0
--- /dev/null
+++ b/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Mutations::MergeRequests::SetSubscription do
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+ let(:user) { create(:user) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+
+ describe '#resolve' do
+ let(:subscribe) { true }
+ let(:mutated_merge_request) { subject[:merge_request] }
+ subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, subscribed_state: subscribe) }
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when the user can update the merge request' do
+ before do
+ merge_request.project.add_developer(user)
+ end
+
+ it 'returns the merge request with the user subscribed' do
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.subscribed?(user, project)).to eq(true)
+ expect(subject[:errors]).to be_empty
+ end
+
+ context 'when passing subscribe as false' do
+ let(:subscribe) { false }
+
+ it 'unsubscribes from the discussion' do
+ merge_request.subscribe(user, project)
+
+ expect(mutated_merge_request.subscribed?(user, project)).to eq(false)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/todos/mark_done_spec.rb b/spec/graphql/mutations/todos/mark_done_spec.rb
new file mode 100644
index 00000000000..761b153d5d1
--- /dev/null
+++ b/spec/graphql/mutations/todos/mark_done_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Mutations::Todos::MarkDone do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:author) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+
+ let_it_be(:todo1) { create(:todo, user: current_user, author: author, state: :pending) }
+ let_it_be(:todo2) { create(:todo, user: current_user, author: author, state: :done) }
+
+ let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :pending) }
+
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }) }
+
+ describe '#resolve' do
+ it 'marks a single todo as done' do
+ result = mark_done_mutation(todo1)
+
+ expect(todo1.reload.state).to eq('done')
+ expect(todo2.reload.state).to eq('done')
+ expect(other_user_todo.reload.state).to eq('pending')
+
+ todo = result[:todo]
+ expect(todo.id).to eq(todo1.id)
+ expect(todo.state).to eq('done')
+ end
+
+ it 'handles a todo which is already done as expected' do
+ result = mark_done_mutation(todo2)
+
+ expect(todo1.reload.state).to eq('pending')
+ expect(todo2.reload.state).to eq('done')
+ expect(other_user_todo.reload.state).to eq('pending')
+
+ todo = result[:todo]
+ expect(todo.id).to eq(todo2.id)
+ expect(todo.state).to eq('done')
+ end
+
+ it 'ignores requests for todos which do not belong to the current user' do
+ expect { mark_done_mutation(other_user_todo) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+
+ expect(todo1.reload.state).to eq('pending')
+ expect(todo2.reload.state).to eq('done')
+ expect(other_user_todo.reload.state).to eq('pending')
+ end
+
+ it 'ignores invalid GIDs' do
+ expect { mutation.resolve(id: 'invalid_gid') }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+
+ expect(todo1.reload.state).to eq('pending')
+ expect(todo2.reload.state).to eq('done')
+ expect(other_user_todo.reload.state).to eq('pending')
+ end
+ end
+
+ def mark_done_mutation(todo)
+ mutation.resolve(id: global_id_of(todo))
+ end
+
+ def global_id_of(todo)
+ todo.to_global_id.to_s
+ end
+end
diff --git a/spec/graphql/resolvers/base_resolver_spec.rb b/spec/graphql/resolvers/base_resolver_spec.rb
index c162fdbbb47..a212bd07f35 100644
--- a/spec/graphql/resolvers/base_resolver_spec.rb
+++ b/spec/graphql/resolvers/base_resolver_spec.rb
@@ -13,6 +13,14 @@ describe Resolvers::BaseResolver do
end
end
+ let(:last_resolver) do
+ Class.new(described_class) do
+ def resolve(**args)
+ [1, 2]
+ end
+ end
+ end
+
describe '.single' do
it 'returns a subclass from the resolver' do
expect(resolver.single.superclass).to eq(resolver)
@@ -29,6 +37,22 @@ describe Resolvers::BaseResolver do
end
end
+ describe '.last' do
+ it 'returns a subclass from the resolver' do
+ expect(last_resolver.last.superclass).to eq(last_resolver)
+ end
+
+ it 'returns the same subclass every time' do
+ expect(last_resolver.last.object_id).to eq(last_resolver.last.object_id)
+ end
+
+ it 'returns a resolver that gives the last result from the original resolver' do
+ result = resolve(last_resolver.last)
+
+ expect(result).to eq(2)
+ end
+ end
+
context 'when field is a connection' do
it 'increases complexity based on arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE.connection_type, resolver_class: described_class, null: false, max_page_size: 1)
diff --git a/spec/graphql/resolvers/commit_pipelines_resolver_spec.rb b/spec/graphql/resolvers/commit_pipelines_resolver_spec.rb
new file mode 100644
index 00000000000..93da877d714
--- /dev/null
+++ b/spec/graphql/resolvers/commit_pipelines_resolver_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resolvers::CommitPipelinesResolver do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let(:commit) { create(:commit, project: project) }
+ let_it_be(:current_user) { create(:user) }
+
+ let!(:pipeline) do
+ create(
+ :ci_pipeline,
+ project: project,
+ sha: commit.id,
+ ref: 'master',
+ status: 'success'
+ )
+ end
+ let!(:pipeline2) do
+ create(
+ :ci_pipeline,
+ project: project,
+ sha: commit.id,
+ ref: 'master',
+ status: 'failed'
+ )
+ end
+ let!(:pipeline3) do
+ create(
+ :ci_pipeline,
+ project: project,
+ sha: commit.id,
+ ref: 'my_branch',
+ status: 'failed'
+ )
+ end
+
+ before do
+ commit.project.add_developer(current_user)
+ end
+
+ def resolve_pipelines
+ resolve(described_class, obj: commit, ctx: { current_user: current_user }, args: { ref: 'master' })
+ end
+
+ it 'resolves pipelines for commit and ref' do
+ pipelines = resolve_pipelines
+
+ expect(pipelines).to eq([pipeline2, pipeline])
+ end
+end
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index 2232c9b7d7b..bf9106643eb 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -72,8 +72,46 @@ describe Resolvers::IssuesResolver do
expect(resolve_issues(search: 'foo')).to contain_exactly(issue2)
end
- it 'sort issues' do
- expect(resolve_issues(sort: 'created_desc')).to eq [issue2, issue1]
+ describe 'sorting' do
+ context 'when sorting by created' do
+ it 'sorts issues ascending' do
+ expect(resolve_issues(sort: 'created_asc')).to eq [issue1, issue2]
+ end
+
+ it 'sorts issues descending' do
+ expect(resolve_issues(sort: 'created_desc')).to eq [issue2, issue1]
+ end
+ end
+
+ context 'when sorting by due date' do
+ let(:project) { create(:project) }
+
+ let!(:due_issue1) { create(:issue, project: project, due_date: 3.days.from_now) }
+ let!(:due_issue2) { create(:issue, project: project, due_date: nil) }
+ let!(:due_issue3) { create(:issue, project: project, due_date: 2.days.ago) }
+ let!(:due_issue4) { create(:issue, project: project, due_date: nil) }
+
+ it 'sorts issues ascending' do
+ expect(resolve_issues(sort: :due_date_asc)).to eq [due_issue3, due_issue1, due_issue4, due_issue2]
+ end
+
+ it 'sorts issues descending' do
+ expect(resolve_issues(sort: :due_date_desc)).to eq [due_issue1, due_issue3, due_issue4, due_issue2]
+ end
+ end
+
+ context 'when sorting by relative position' do
+ let(:project) { create(:project) }
+
+ let!(:relative_issue1) { create(:issue, project: project, relative_position: 2000) }
+ let!(:relative_issue2) { create(:issue, project: project, relative_position: nil) }
+ let!(:relative_issue3) { create(:issue, project: project, relative_position: 1000) }
+ let!(:relative_issue4) { create(:issue, project: project, relative_position: nil) }
+
+ it 'sorts issues ascending' do
+ expect(resolve_issues(sort: :relative_position_asc)).to eq [relative_issue3, relative_issue1, relative_issue4, relative_issue2]
+ end
+ end
end
it 'returns issues user can see' do
diff --git a/spec/graphql/types/base_enum_spec.rb b/spec/graphql/types/base_enum_spec.rb
new file mode 100644
index 00000000000..3eadb492cf5
--- /dev/null
+++ b/spec/graphql/types/base_enum_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Types::BaseEnum do
+ describe '#enum' do
+ let(:enum) do
+ Class.new(described_class) do
+ value 'TEST', value: 3
+ value 'other'
+ value 'NORMAL'
+ end
+ end
+
+ it 'adds all enum values to #enum' do
+ expect(enum.enum.keys).to contain_exactly('test', 'other', 'normal')
+ expect(enum.enum.values).to contain_exactly(3, 'other', 'NORMAL')
+ end
+
+ it 'is a HashWithIndifferentAccess' do
+ expect(enum.enum).to be_a(HashWithIndifferentAccess)
+ end
+ end
+end
diff --git a/spec/graphql/types/commit_type_spec.rb b/spec/graphql/types/commit_type_spec.rb
index 1ff1c97f8db..1c3b46ecfde 100644
--- a/spec/graphql/types/commit_type_spec.rb
+++ b/spec/graphql/types/commit_type_spec.rb
@@ -10,7 +10,7 @@ describe GitlabSchema.types['Commit'] do
it 'contains attributes related to commit' do
expect(described_class).to have_graphql_fields(
:id, :sha, :title, :description, :message, :authored_date,
- :author, :web_url, :latest_pipeline, :signature_html
+ :author_name, :author, :web_url, :latest_pipeline, :pipelines, :signature_html
)
end
end
diff --git a/spec/graphql/types/extended_issue_type_spec.rb b/spec/graphql/types/extended_issue_type_spec.rb
deleted file mode 100644
index 72ce53ae1be..00000000000
--- a/spec/graphql/types/extended_issue_type_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe GitlabSchema.types['ExtendedIssue'] do
- it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Issue) }
-
- it { expect(described_class.graphql_name).to eq('ExtendedIssue') }
-
- it { expect(described_class).to require_graphql_authorizations(:read_issue) }
-
- it { expect(described_class.interfaces).to include(Types::Notes::NoteableType.to_graphql) }
-
- it 'has specific fields' do
- fields = Types::IssueType.fields.keys + [:subscribed]
-
- fields.each do |field_name|
- expect(described_class).to have_graphql_field(field_name)
- end
- end
-end
diff --git a/spec/graphql/types/issue_sort_enum_spec.rb b/spec/graphql/types/issue_sort_enum_spec.rb
new file mode 100644
index 00000000000..1b6aa6d6069
--- /dev/null
+++ b/spec/graphql/types/issue_sort_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['IssueSort'] do
+ it { expect(described_class.graphql_name).to eq('IssueSort') }
+
+ it_behaves_like 'common sort values'
+
+ it 'exposes all the existing issue sort values' do
+ expect(described_class.values.keys).to include(*%w[DUE_DATE_ASC DUE_DATE_DESC RELATIVE_POSITION_ASC])
+ end
+end
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index 8aa2385ddaa..daa2224ef20 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -14,7 +14,7 @@ describe GitlabSchema.types['Issue'] do
it 'has specific fields' do
fields = %i[iid title description state reference author assignees participants labels milestone due_date
confidential discussion_locked upvotes downvotes user_notes_count web_path web_url relative_position
- time_estimate total_time_spent closed_at created_at updated_at task_completion_status]
+ subscribed time_estimate total_time_spent closed_at created_at updated_at task_completion_status]
fields.each do |field_name|
expect(described_class).to have_graphql_field(field_name)
diff --git a/spec/graphql/types/label_type_spec.rb b/spec/graphql/types/label_type_spec.rb
index 8e7b2c69eff..a023a75eeff 100644
--- a/spec/graphql/types/label_type_spec.rb
+++ b/spec/graphql/types/label_type_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe GitlabSchema.types['Label'] do
it 'has the correct fields' do
- expected_fields = [:description, :description_html, :title, :color, :text_color]
+ expected_fields = [:id, :description, :description_html, :title, :color, :text_color]
is_expected.to have_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index cfd0f8ec7a7..19a433f090e 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -23,6 +23,7 @@ describe GitlabSchema.types['Project'] do
only_allow_merge_if_all_discussions_are_resolved printing_merge_request_link_enabled
namespace group statistics repository merge_requests merge_request issues
issue pipelines
+ removeSourceBranchAfterMerge
]
is_expected.to have_graphql_fields(*expected_fields)
@@ -32,7 +33,7 @@ describe GitlabSchema.types['Project'] do
subject { described_class.fields['issue'] }
it 'returns issue' do
- is_expected.to have_graphql_type(Types::ExtendedIssueType)
+ is_expected.to have_graphql_type(Types::IssueType)
is_expected.to have_graphql_resolver(Resolvers::IssuesResolver.single)
end
end
diff --git a/spec/graphql/types/tree/blob_type_spec.rb b/spec/graphql/types/tree/blob_type_spec.rb
index 22c11aff90a..516c862b9c6 100644
--- a/spec/graphql/types/tree/blob_type_spec.rb
+++ b/spec/graphql/types/tree/blob_type_spec.rb
@@ -5,5 +5,5 @@ require 'spec_helper'
describe Types::Tree::BlobType do
it { expect(described_class.graphql_name).to eq('Blob') }
- it { expect(described_class).to have_graphql_fields(:id, :name, :type, :path, :flat_path, :web_url, :lfs_oid) }
+ it { expect(described_class).to have_graphql_fields(:id, :sha, :name, :type, :path, :flat_path, :web_url, :lfs_oid) }
end
diff --git a/spec/graphql/types/tree/submodule_type_spec.rb b/spec/graphql/types/tree/submodule_type_spec.rb
index 768eccba68c..81f7ad825a1 100644
--- a/spec/graphql/types/tree/submodule_type_spec.rb
+++ b/spec/graphql/types/tree/submodule_type_spec.rb
@@ -5,5 +5,5 @@ require 'spec_helper'
describe Types::Tree::SubmoduleType do
it { expect(described_class.graphql_name).to eq('Submodule') }
- it { expect(described_class).to have_graphql_fields(:id, :name, :type, :path, :flat_path, :web_url, :tree_url) }
+ it { expect(described_class).to have_graphql_fields(:id, :sha, :name, :type, :path, :flat_path, :web_url, :tree_url) }
end
diff --git a/spec/graphql/types/tree/tree_entry_type_spec.rb b/spec/graphql/types/tree/tree_entry_type_spec.rb
index ea1b6426034..228a4be0949 100644
--- a/spec/graphql/types/tree/tree_entry_type_spec.rb
+++ b/spec/graphql/types/tree/tree_entry_type_spec.rb
@@ -5,5 +5,5 @@ require 'spec_helper'
describe Types::Tree::TreeEntryType do
it { expect(described_class.graphql_name).to eq('TreeEntry') }
- it { expect(described_class).to have_graphql_fields(:id, :name, :type, :path, :flat_path, :web_url) }
+ it { expect(described_class).to have_graphql_fields(:id, :sha, :name, :type, :path, :flat_path, :web_url) }
end
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index e8c438e459b..d3d25d3cb74 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -210,7 +210,9 @@ describe ApplicationHelper do
let(:user) { create(:user, static_object_token: 'hunter1') }
before do
- allow_any_instance_of(ApplicationSetting).to receive(:static_objects_external_storage_url).and_return('https://cdn.gitlab.com')
+ allow_next_instance_of(ApplicationSetting) do |instance|
+ allow(instance).to receive(:static_objects_external_storage_url).and_return('https://cdn.gitlab.com')
+ end
allow(helper).to receive(:current_user).and_return(user)
end
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index 705523f1110..8303c4eafbe 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -36,4 +36,27 @@ describe ApplicationSettingsHelper do
it_behaves_like 'when HTTP protocol is in use', 'https'
it_behaves_like 'when HTTP protocol is in use', 'http'
+
+ context 'with tracking parameters' do
+ it { expect(visible_attributes).to include(*%i(snowplow_collector_hostname snowplow_cookie_domain snowplow_enabled snowplow_app_id)) }
+ end
+
+ describe '.integration_expanded?' do
+ let(:application_setting) { build(:application_setting) }
+
+ it 'is expanded' do
+ application_setting.plantuml_enabled = true
+ application_setting.valid?
+ helper.instance_variable_set(:@application_setting, application_setting)
+
+ expect(helper.integration_expanded?('plantuml_')).to be_truthy
+ end
+
+ it 'is not expanded' do
+ application_setting.valid?
+ helper.instance_variable_set(:@application_setting, application_setting)
+
+ expect(helper.integration_expanded?('plantuml_')).to be_falsey
+ end
+ end
end
diff --git a/spec/helpers/auth_helper_spec.rb b/spec/helpers/auth_helper_spec.rb
index aae515def0c..cb7c670198d 100644
--- a/spec/helpers/auth_helper_spec.rb
+++ b/spec/helpers/auth_helper_spec.rb
@@ -54,6 +54,23 @@ describe AuthHelper do
end
end
+ describe 'any_form_based_providers_enabled?' do
+ before do
+ allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
+ end
+
+ it 'detects form-based providers' do
+ allow(helper).to receive(:auth_providers) { [:twitter, :ldapmain] }
+ expect(helper.any_form_based_providers_enabled?).to be(true)
+ end
+
+ it 'ignores ldap providers when ldap web sign in is disabled' do
+ allow(helper).to receive(:auth_providers) { [:twitter, :ldapmain] }
+ allow(helper).to receive(:ldap_sign_in_enabled?).and_return(false)
+ expect(helper.any_form_based_providers_enabled?).to be(false)
+ end
+ end
+
describe 'enabled_button_based_providers' do
before do
allow(helper).to receive(:auth_providers) { [:twitter, :github] }
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index 4ea0f76fc28..1ee638ddf04 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -30,4 +30,60 @@ describe ClustersHelper do
end
end
end
+
+ describe '#create_new_cluster_label' do
+ subject { helper.create_new_cluster_label(provider: provider) }
+
+ context 'GCP provider' do
+ let(:provider) { 'gcp' }
+
+ it { is_expected.to eq('Create new Cluster on GKE') }
+ end
+
+ context 'AWS provider' do
+ let(:provider) { 'aws' }
+
+ it { is_expected.to eq('Create new Cluster on EKS') }
+ end
+
+ context 'other provider' do
+ let(:provider) { 'other' }
+
+ it { is_expected.to eq('Create new Cluster') }
+ end
+
+ context 'no provider' do
+ let(:provider) { nil }
+
+ it { is_expected.to eq('Create new Cluster') }
+ end
+ end
+
+ describe '#render_new_provider_form' do
+ subject { helper.new_cluster_partial(provider: provider) }
+
+ context 'GCP provider' do
+ let(:provider) { 'gcp' }
+
+ it { is_expected.to eq('clusters/clusters/gcp/new') }
+ end
+
+ context 'AWS provider' do
+ let(:provider) { 'aws' }
+
+ it { is_expected.to eq('clusters/clusters/aws/new') }
+ end
+
+ context 'other provider' do
+ let(:provider) { 'other' }
+
+ it { is_expected.to eq('clusters/clusters/cloud_providers/cloud_provider_selector') }
+ end
+
+ context 'no provider' do
+ let(:provider) { nil }
+
+ it { is_expected.to eq('clusters/clusters/cloud_providers/cloud_provider_selector') }
+ end
+ end
end
diff --git a/spec/helpers/dashboard_helper_spec.rb b/spec/helpers/dashboard_helper_spec.rb
index c899c2d9853..8a4ea33ac7c 100644
--- a/spec/helpers/dashboard_helper_spec.rb
+++ b/spec/helpers/dashboard_helper_spec.rb
@@ -25,39 +25,62 @@ describe DashboardHelper do
end
describe '#feature_entry' do
- context 'when implicitly enabled' do
- it 'considers feature enabled by default' do
- entry = feature_entry('Demo', href: 'demo.link')
+ shared_examples "a feature is enabled" do
+ it { is_expected.to include('<p aria-label="Demo: status on">') }
+ end
+
+ shared_examples "a feature is disabled" do
+ it { is_expected.to include('<p aria-label="Demo: status off">') }
+ end
- expect(entry).to include('<p aria-label="Demo: status on">')
- expect(entry).to include('<a href="demo.link">Demo</a>')
+ shared_examples "a feature without link" do
+ it do
+ is_expected.not_to have_link('Demo')
+ is_expected.not_to have_link('Documentation')
end
end
+ shared_examples "a feature with configuration" do
+ it { is_expected.to have_link('Demo', href: 'demo.link') }
+ end
+
+ shared_examples "a feature with documentation" do
+ it { is_expected.to have_link('Documentation', href: 'doc.link') }
+ end
+
+ context 'when implicitly enabled' do
+ subject { feature_entry('Demo') }
+
+ it_behaves_like 'a feature is enabled'
+ end
+
context 'when explicitly enabled' do
- it 'returns a link' do
- entry = feature_entry('Demo', href: 'demo.link', enabled: true)
+ context 'without links' do
+ subject { feature_entry('Demo', enabled: true) }
- expect(entry).to include('<p aria-label="Demo: status on">')
- expect(entry).to include('<a href="demo.link">Demo</a>')
+ it_behaves_like 'a feature is enabled'
+ it_behaves_like 'a feature without link'
end
- it 'returns text if href is not provided' do
- entry = feature_entry('Demo', enabled: true)
+ context 'with configure link' do
+ subject { feature_entry('Demo', href: 'demo.link', enabled: true) }
- expect(entry).to include('<p aria-label="Demo: status on">')
- expect(entry).not_to match(/<a[^>]+>/)
+ it_behaves_like 'a feature with configuration'
+ end
+
+ context 'with configure and documentation links' do
+ subject { feature_entry('Demo', href: 'demo.link', doc_href: 'doc.link', enabled: true) }
+
+ it_behaves_like 'a feature with configuration'
+ it_behaves_like 'a feature with documentation'
end
end
context 'when disabled' do
- it 'returns text without link' do
- entry = feature_entry('Demo', href: 'demo.link', enabled: false)
+ subject { feature_entry('Demo', href: 'demo.link', enabled: false) }
- expect(entry).to include('<p aria-label="Demo: status off">')
- expect(entry).not_to match(/<a[^>]+>/)
- expect(entry).to include('Demo')
- end
+ it_behaves_like 'a feature is disabled'
+ it_behaves_like 'a feature without link'
end
end
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index 2b8bf9319fc..a50c8e9bf8e 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -32,6 +32,7 @@ describe EnvironmentsHelper do
'project-path' => project_path(project),
'tags-path' => project_tags_path(project),
'has-metrics' => "#{environment.has_metrics?}",
+ 'prometheus-status' => "#{environment.prometheus_status}",
'external-dashboard-url' => nil
)
end
diff --git a/spec/helpers/gitlab_routing_helper_spec.rb b/spec/helpers/gitlab_routing_helper_spec.rb
index bf043f3f013..38699108b06 100644
--- a/spec/helpers/gitlab_routing_helper_spec.rb
+++ b/spec/helpers/gitlab_routing_helper_spec.rb
@@ -75,6 +75,12 @@ describe GitlabRoutingHelper do
expect(preview_markdown_path(group)).to eq("/groups/#{group.path}/preview_markdown")
end
+ it 'returns group preview markdown path for a group parent with args' do
+ group = create(:group)
+
+ expect(preview_markdown_path(group, { type_id: 5 })).to eq("/groups/#{group.path}/preview_markdown?type_id=5")
+ end
+
it 'returns project preview markdown path for a project parent' do
expect(preview_markdown_path(project)).to eq("/#{project.full_path}/preview_markdown")
end
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 2f67ea457a0..1af8b7390bb 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -203,42 +203,53 @@ describe IssuablesHelper do
end
describe '#zoomMeetingUrl in issue' do
- let(:issue) { create(:issue, author: user, description: description) }
+ let(:issue) { create(:issue, author: user) }
before do
assign(:project, issue.project)
end
- context 'no zoom links in the issue description' do
- let(:description) { 'issue text' }
-
- it 'does not set zoomMeetingUrl' do
- expect(helper.issuable_initial_data(issue))
- .not_to include(:zoomMeetingUrl)
+ shared_examples 'sets zoomMeetingUrl to nil' do
+ specify do
+ expect(helper.issuable_initial_data(issue)[:zoomMeetingUrl])
+ .to be_nil
end
end
- context 'no zoom links in the issue description if it has link but not a zoom link' do
- let(:description) { 'issue text https://stackoverflow.com/questions/22' }
+ context 'with no "added" zoom meetings' do
+ it_behaves_like 'sets zoomMeetingUrl to nil'
+
+ context 'with multiple removed meetings' do
+ before do
+ create(:zoom_meeting, issue: issue, issue_status: :removed)
+ create(:zoom_meeting, issue: issue, issue_status: :removed)
+ end
- it 'does not set zoomMeetingUrl' do
- expect(helper.issuable_initial_data(issue))
- .not_to include(:zoomMeetingUrl)
+ it_behaves_like 'sets zoomMeetingUrl to nil'
end
end
- context 'with two zoom links in description' do
- let(:description) do
- <<~TEXT
- issue text and
- zoom call on https://zoom.us/j/123456789 this url
- and new zoom url https://zoom.us/s/lastone and some more text
- TEXT
+ context 'with "added" zoom meeting' do
+ before do
+ create(:zoom_meeting, issue: issue)
end
- it 'sets zoomMeetingUrl value to the last url' do
- expect(helper.issuable_initial_data(issue))
- .to include(zoomMeetingUrl: 'https://zoom.us/s/lastone')
+ shared_examples 'sets zoomMeetingUrl to canonical meeting url' do
+ specify do
+ expect(helper.issuable_initial_data(issue))
+ .to include(zoomMeetingUrl: 'https://zoom.us/j/123456789')
+ end
+ end
+
+ it_behaves_like 'sets zoomMeetingUrl to canonical meeting url'
+
+ context 'with multiple "removed" zoom meetings' do
+ before do
+ create(:zoom_meeting, issue: issue, issue_status: :removed)
+ create(:zoom_meeting, issue: issue, issue_status: :removed)
+ end
+
+ it_behaves_like 'sets zoomMeetingUrl to canonical meeting url'
end
end
end
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index 32851249b2e..5ca5f5703cf 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -89,6 +89,35 @@ describe MarkupHelper do
end
end
end
+
+ context 'when text contains a relative link to an image in the repository' do
+ let(:image_file) { "logo-white.png" }
+ let(:text_with_relative_path) { "![](./#{image_file})\n" }
+ let(:generated_html) { helper.markdown(text_with_relative_path, requested_path: requested_path) }
+
+ subject { Nokogiri::HTML.parse(generated_html) }
+
+ context 'when requested_path is provided in the context' do
+ let(:requested_path) { 'files/images/README.md' }
+
+ it 'returns the correct HTML for the image' do
+ expanded_path = "/#{project.full_path}/raw/master/files/images/#{image_file}"
+
+ expect(subject.css('a')[0].attr('href')).to eq(expanded_path)
+ expect(subject.css('img')[0].attr('data-src')).to eq(expanded_path)
+ end
+ end
+
+ context 'when requested_path parameter is not provided' do
+ let(:requested_path) { nil }
+
+ it 'returns the link to the image path as a relative path' do
+ expanded_path = "/#{project.full_path}/master/./#{image_file}"
+
+ expect(subject.css('a')[0].attr('href')).to eq(expanded_path)
+ end
+ end
+ end
end
describe '#markdown_field' do
@@ -210,7 +239,7 @@ describe MarkupHelper do
it 'replaces commit message with emoji to link' do
actual = link_to_markdown(':book: Book', '/foo')
expect(actual)
- .to eq '<gl-emoji title="open book" data-name="book" data-unicode-version="6.0">📖</gl-emoji><a href="/foo"> Book</a>'
+ .to eq '<a href="/foo"><gl-emoji title="open book" data-name="book" data-unicode-version="6.0">📖</gl-emoji></a><a href="/foo"> Book</a>'
end
end
@@ -232,6 +261,12 @@ describe MarkupHelper do
expect(doc.css('a')[0].attr('href')).to eq link
expect(doc.css('a')[0].text).to eq 'This should finally fix '
end
+
+ it "escapes HTML passed as an emoji" do
+ rendered = '<gl-emoji>&lt;div class="test"&gt;test&lt;/div&gt;</gl-emoji>'
+ expect(helper.link_to_html(rendered, '/foo'))
+ .to eq '<a href="/foo"><gl-emoji>&lt;div class="test"&gt;test&lt;/div&gt;</gl-emoji></a>'
+ end
end
describe '#render_wiki_content' do
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 1fa3c639603..cd1b1f91e9f 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -938,4 +938,22 @@ describe ProjectsHelper do
it { is_expected.to eq(grafana_integration.token) }
end
end
+
+ describe '#grafana_integration_enabled?' do
+ let(:project) { create(:project) }
+
+ before do
+ helper.instance_variable_set(:@project, project)
+ end
+
+ subject { helper.grafana_integration_enabled? }
+
+ it { is_expected.to eq(nil) }
+
+ context 'grafana integration exists' do
+ let!(:grafana_integration) { create(:grafana_integration, project: project) }
+
+ it { is_expected.to eq(grafana_integration.enabled) }
+ end
+ end
end
diff --git a/spec/helpers/releases_helper_spec.rb b/spec/helpers/releases_helper_spec.rb
index 3b4973677ef..3f56c189642 100644
--- a/spec/helpers/releases_helper_spec.rb
+++ b/spec/helpers/releases_helper_spec.rb
@@ -17,9 +17,11 @@ describe ReleasesHelper do
context 'url helpers' do
let(:project) { build(:project, namespace: create(:group)) }
+ let(:release) { create(:release, project: project) }
before do
helper.instance_variable_set(:@project, project)
+ helper.instance_variable_set(:@release, release)
end
describe '#data_for_releases_page' do
@@ -28,5 +30,17 @@ describe ReleasesHelper do
expect(helper.data_for_releases_page.keys).to eq(keys)
end
end
+
+ describe '#data_for_edit_release_page' do
+ it 'has the needed data to display the "edit release" page' do
+ keys = %i(project_id
+ tag_name
+ markdown_preview_path
+ markdown_docs_path
+ releases_page_path
+ update_release_api_docs_path)
+ expect(helper.data_for_edit_release_page.keys).to eq(keys)
+ end
+ end
end
end
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 9e9f87b3407..bef6fbe3d5f 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -167,6 +167,7 @@ describe SearchHelper do
expect(search_filter_input_options('')[:data]['runner-tags-endpoint']).to eq(tag_list_admin_runners_path)
expect(search_filter_input_options('')[:data]['labels-endpoint']).to eq(project_labels_path(@project))
expect(search_filter_input_options('')[:data]['milestones-endpoint']).to eq(project_milestones_path(@project))
+ expect(search_filter_input_options('')[:data]['releases-endpoint']).to eq(project_releases_path(@project))
end
it 'includes autocomplete=off flag' do
@@ -271,4 +272,50 @@ describe SearchHelper do
expect(link).to have_css('li[data-foo="bar"]')
end
end
+
+ describe '#show_user_search_tab?' do
+ subject { show_user_search_tab? }
+
+ context 'when users_search feature is disabled' do
+ before do
+ stub_feature_flags(users_search: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when project search' do
+ before do
+ @project = :some_project
+
+ expect(self).to receive(:project_search_tabs?)
+ .with(:members)
+ .and_return(:value)
+ end
+
+ it 'delegates to project_search_tabs?' do
+ expect(subject).to eq(:value)
+ end
+ end
+
+ context 'when not project search' do
+ context 'when current_user can read_users_list' do
+ before do
+ allow(self).to receive(:current_user).and_return(:the_current_user)
+ allow(self).to receive(:can?).with(:the_current_user, :read_users_list).and_return(true)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when current_user cannot read_users_list' do
+ before do
+ allow(self).to receive(:current_user).and_return(:the_current_user)
+ allow(self).to receive(:can?).with(:the_current_user, :read_users_list).and_return(false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
end
diff --git a/spec/helpers/snippets_helper_spec.rb b/spec/helpers/snippets_helper_spec.rb
index 66c8d576a4c..d88e151a11c 100644
--- a/spec/helpers/snippets_helper_spec.rb
+++ b/spec/helpers/snippets_helper_spec.rb
@@ -3,33 +3,217 @@
require 'spec_helper'
describe SnippetsHelper do
+ include Gitlab::Routing
include IconsHelper
- describe '#embedded_snippet_raw_button' do
- it 'gives view raw button of embedded snippets for project snippets' do
- @snippet = create(:project_snippet, :public)
+ let_it_be(:public_personal_snippet) { create(:personal_snippet, :public) }
+ let_it_be(:public_project_snippet) { create(:project_snippet, :public) }
+
+ describe '#reliable_snippet_path' do
+ subject { reliable_snippet_path(snippet) }
+
+ context 'personal snippets' do
+ let(:snippet) { public_personal_snippet }
+
+ context 'public' do
+ it 'returns a full path' do
+ expect(subject).to eq("/snippets/#{snippet.id}")
+ end
+ end
+ end
+
+ context 'project snippets' do
+ let(:snippet) { public_project_snippet }
+
+ it 'returns a full path' do
+ expect(subject).to eq("/#{snippet.project.full_path}/snippets/#{snippet.id}")
+ end
+ end
+ end
+
+ describe '#reliable_snippet_url' do
+ subject { reliable_snippet_url(snippet) }
+
+ context 'personal snippets' do
+ let(:snippet) { public_personal_snippet }
+
+ context 'public' do
+ it 'returns a full url' do
+ expect(subject).to eq("http://test.host/snippets/#{snippet.id}")
+ end
+ end
+ end
+
+ context 'project snippets' do
+ let(:snippet) { public_project_snippet }
+
+ it 'returns a full url' do
+ expect(subject).to eq("http://test.host/#{snippet.project.full_path}/snippets/#{snippet.id}")
+ end
+ end
+ end
+
+ describe '#reliable_raw_snippet_path' do
+ subject { reliable_raw_snippet_path(snippet) }
+
+ context 'personal snippets' do
+ let(:snippet) { public_personal_snippet }
- expect(embedded_snippet_raw_button.to_s).to eq("<a class=\"btn\" target=\"_blank\" rel=\"noopener noreferrer\" title=\"Open raw\" href=\"#{raw_project_snippet_url(@snippet.project, @snippet)}\">#{external_snippet_icon('doc-code')}</a>")
+ context 'public' do
+ it 'returns a full path' do
+ expect(subject).to eq("/snippets/#{snippet.id}/raw")
+ end
+ end
end
- it 'gives view raw button of embedded snippets for personal snippets' do
+ context 'project snippets' do
+ let(:snippet) { public_project_snippet }
+
+ it 'returns a full path' do
+ expect(subject).to eq("/#{snippet.project.full_path}/snippets/#{snippet.id}/raw")
+ end
+ end
+ end
+
+ describe '#reliable_raw_snippet_url' do
+ subject { reliable_raw_snippet_url(snippet) }
+
+ context 'personal snippets' do
+ let(:snippet) { public_personal_snippet }
+
+ context 'public' do
+ it 'returns a full url' do
+ expect(subject).to eq("http://test.host/snippets/#{snippet.id}/raw")
+ end
+ end
+ end
+
+ context 'project snippets' do
+ let(:snippet) { public_project_snippet }
+
+ it 'returns a full url' do
+ expect(subject).to eq("http://test.host/#{snippet.project.full_path}/snippets/#{snippet.id}/raw")
+ end
+ end
+ end
+
+ describe '#embedded_raw_snippet_button' do
+ subject { embedded_raw_snippet_button.to_s }
+
+ it 'returns view raw button of embedded snippets for personal snippets' do
@snippet = create(:personal_snippet, :public)
- expect(embedded_snippet_raw_button.to_s).to eq("<a class=\"btn\" target=\"_blank\" rel=\"noopener noreferrer\" title=\"Open raw\" href=\"#{raw_snippet_url(@snippet)}\">#{external_snippet_icon('doc-code')}</a>")
+ expect(subject).to eq(download_link("http://test.host/snippets/#{@snippet.id}/raw"))
+ end
+
+ it 'returns view raw button of embedded snippets for project snippets' do
+ @snippet = create(:project_snippet, :public)
+
+ expect(subject).to eq(download_link("http://test.host/#{@snippet.project.path_with_namespace}/snippets/#{@snippet.id}/raw"))
+ end
+
+ def download_link(url)
+ "<a class=\"btn\" target=\"_blank\" rel=\"noopener noreferrer\" title=\"Open raw\" href=\"#{url}\">#{external_snippet_icon('doc-code')}</a>"
end
end
describe '#embedded_snippet_download_button' do
- it 'gives download button of embedded snippets for project snippets' do
+ subject { embedded_snippet_download_button }
+
+ it 'returns download button of embedded snippets for personal snippets' do
+ @snippet = create(:personal_snippet, :public)
+
+ expect(subject).to eq(download_link("http://test.host/snippets/#{@snippet.id}/raw"))
+ end
+
+ it 'returns download button of embedded snippets for project snippets' do
@snippet = create(:project_snippet, :public)
- expect(embedded_snippet_download_button.to_s).to eq("<a class=\"btn\" target=\"_blank\" title=\"Download\" rel=\"noopener noreferrer\" href=\"#{raw_project_snippet_url(@snippet.project, @snippet, inline: false)}\">#{external_snippet_icon('download')}</a>")
+ expect(subject).to eq(download_link("http://test.host/#{@snippet.project.path_with_namespace}/snippets/#{@snippet.id}/raw"))
end
- it 'gives download button of embedded snippets for personal snippets' do
- @snippet = create(:personal_snippet, :public)
+ def download_link(url)
+ "<a class=\"btn\" target=\"_blank\" title=\"Download\" rel=\"noopener noreferrer\" href=\"#{url}?inline=false\">#{external_snippet_icon('download')}</a>"
+ end
+ end
+
+ describe '#snippet_embed_tag' do
+ subject { snippet_embed_tag(snippet) }
+
+ context 'personal snippets' do
+ let(:snippet) { public_personal_snippet }
+
+ context 'public' do
+ it 'returns a script tag with the snippet full url' do
+ expect(subject).to eq(script_embed("http://test.host/snippets/#{snippet.id}"))
+ end
+ end
+ end
+
+ context 'project snippets' do
+ let(:snippet) { public_project_snippet }
+
+ it 'returns a script tag with the snippet full url' do
+ expect(subject).to eq(script_embed("http://test.host/#{snippet.project.path_with_namespace}/snippets/#{snippet.id}"))
+ end
+ end
+
+ def script_embed(url)
+ "<script src=\"#{url}.js\"></script>"
+ end
+ end
+
+ describe '#download_raw_snippet_button' do
+ subject { download_raw_snippet_button(snippet) }
+
+ context 'with personal snippet' do
+ let(:snippet) { public_personal_snippet }
+
+ it 'returns the download button' do
+ expect(subject).to eq(download_link("/snippets/#{snippet.id}/raw"))
+ end
+ end
+
+ context 'with project snippet' do
+ let(:snippet) { public_project_snippet }
+
+ it 'returns the download button' do
+ expect(subject).to eq(download_link("/#{snippet.project.path_with_namespace}/snippets/#{snippet.id}/raw"))
+ end
+ end
+
+ def download_link(url)
+ "<a target=\"_blank\" rel=\"noopener noreferrer\" class=\"btn btn-sm has-tooltip\" title=\"Download\" data-container=\"body\" href=\"#{url}?inline=false\"><i aria-hidden=\"true\" data-hidden=\"true\" class=\"fa fa-download\"></i></a>"
+ end
+ end
+
+ describe '#snippet_badge' do
+ let(:snippet) { build(:personal_snippet, visibility) }
+
+ subject { snippet_badge(snippet) }
+
+ context 'when snippet is private' do
+ let(:visibility) { :private }
+
+ it 'returns the snippet badge' do
+ expect(subject).to eq "<span class=\"badge badge-gray\"><i class=\"fa fa-lock\"></i> private</span>"
+ end
+ end
+
+ context 'when snippet is public' do
+ let(:visibility) { :public }
+
+ it 'does not return anything' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when snippet is internal' do
+ let(:visibility) { :internal }
- expect(embedded_snippet_download_button.to_s).to eq("<a class=\"btn\" target=\"_blank\" title=\"Download\" rel=\"noopener noreferrer\" href=\"#{raw_snippet_url(@snippet, inline: false)}\">#{external_snippet_icon('download')}</a>")
+ it 'does not return anything' do
+ expect(subject).to be_nil
+ end
end
end
end
diff --git a/spec/helpers/sourcegraph_helper_spec.rb b/spec/helpers/sourcegraph_helper_spec.rb
new file mode 100644
index 00000000000..830bbb3129f
--- /dev/null
+++ b/spec/helpers/sourcegraph_helper_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SourcegraphHelper do
+ describe '#sourcegraph_url_message' do
+ let(:sourcegraph_url) { 'http://sourcegraph.example.com' }
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:sourcegraph_url).and_return(sourcegraph_url)
+ allow(Gitlab::CurrentSettings).to receive(:sourcegraph_url_is_com?).and_return(is_com)
+ end
+
+ subject { helper.sourcegraph_url_message }
+
+ context 'with .com sourcegraph url' do
+ let(:is_com) { true }
+
+ it { is_expected.to have_text('Uses Sourcegraph.com') }
+ it { is_expected.to have_link('Sourcegraph.com', href: sourcegraph_url) }
+ end
+
+ context 'with custom sourcegraph url' do
+ let(:is_com) { false }
+
+ it { is_expected.to have_text('Uses a custom Sourcegraph instance') }
+ it { is_expected.to have_link('Sourcegraph instance', href: sourcegraph_url) }
+
+ context 'with unsafe url' do
+ let(:sourcegraph_url) { '\" onload=\"alert(1);\"' }
+
+ it { is_expected.to have_link('Sourcegraph instance', href: sourcegraph_url) }
+ end
+ end
+ end
+
+ context '#sourcegraph_experimental_message' do
+ let(:feature_conditional) { false }
+ let(:public_only) { false }
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:sourcegraph_public_only).and_return(public_only)
+ allow(Gitlab::Sourcegraph).to receive(:feature_conditional?).and_return(feature_conditional)
+ end
+
+ subject { helper.sourcegraph_experimental_message }
+
+ context 'when not limited by feature or public only' do
+ it { is_expected.to eq "This feature is experimental." }
+ end
+
+ context 'when limited by feature' do
+ let(:feature_conditional) { true }
+
+ it { is_expected.to eq "This feature is experimental and currently limited to certain projects." }
+ end
+
+ context 'when limited by public only' do
+ let(:public_only) { true }
+
+ it { is_expected.to eq "This feature is experimental and limited to public projects." }
+ end
+ end
+end
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index 59abe8c09e1..172ead158fb 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -76,6 +76,10 @@ describe UsersHelper do
allow(helper).to receive(:can?).and_return(false)
end
+ after do
+ expect(items).not_to include(:start_trial)
+ end
+
it 'includes all default items' do
expect(items).to include(:help, :sign_out)
end
diff --git a/spec/initializers/6_validations_spec.rb b/spec/initializers/6_validations_spec.rb
index 73fbd4c7a44..248f967311b 100644
--- a/spec/initializers/6_validations_spec.rb
+++ b/spec/initializers/6_validations_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require_relative '../../config/initializers/6_validations.rb'
diff --git a/spec/initializers/action_mailer_hooks_spec.rb b/spec/initializers/action_mailer_hooks_spec.rb
index 3826ed9b00a..ce6e1ed0fa2 100644
--- a/spec/initializers/action_mailer_hooks_spec.rb
+++ b/spec/initializers/action_mailer_hooks_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'ActionMailer hooks' do
diff --git a/spec/initializers/asset_proxy_setting_spec.rb b/spec/initializers/asset_proxy_setting_spec.rb
index 42e4d4aa594..7eab5de155b 100644
--- a/spec/initializers/asset_proxy_setting_spec.rb
+++ b/spec/initializers/asset_proxy_setting_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'Asset proxy settings initialization' do
diff --git a/spec/initializers/attr_encrypted_no_db_connection_spec.rb b/spec/initializers/attr_encrypted_no_db_connection_spec.rb
index 2da9f1cbd96..14e0e1f2167 100644
--- a/spec/initializers/attr_encrypted_no_db_connection_spec.rb
+++ b/spec/initializers/attr_encrypted_no_db_connection_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'GitLab monkey-patches to AttrEncrypted' do
diff --git a/spec/initializers/database_config_spec.rb b/spec/initializers/database_config_spec.rb
new file mode 100644
index 00000000000..a5a074f5884
--- /dev/null
+++ b/spec/initializers/database_config_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Database config initializer' do
+ subject do
+ load Rails.root.join('config/initializers/database_config.rb')
+ end
+
+ before do
+ allow(ActiveRecord::Base).to receive(:establish_connection)
+ end
+
+ context "when using Puma" do
+ let(:puma) { double('puma') }
+ let(:puma_options) { { max_threads: 8 } }
+
+ before do
+ stub_const("Puma", puma)
+ allow(puma).to receive_message_chain(:cli_config, :options).and_return(puma_options)
+ end
+
+ context "and no existing pool size is set" do
+ before do
+ stub_database_config(pool_size: nil)
+ end
+
+ it "sets it to the max number of worker threads" do
+ expect { subject }.to change { Gitlab::Database.config['pool'] }.from(nil).to(8)
+ end
+ end
+
+ context "and the existing pool size is smaller than the max number of worker threads" do
+ before do
+ stub_database_config(pool_size: 7)
+ end
+
+ it "sets it to the max number of worker threads" do
+ expect { subject }.to change { Gitlab::Database.config['pool'] }.from(7).to(8)
+ end
+ end
+
+ context "and the existing pool size is larger than the max number of worker threads" do
+ before do
+ stub_database_config(pool_size: 9)
+ end
+
+ it "keeps the configured pool size" do
+ expect { subject }.not_to change { Gitlab::Database.config['pool'] }
+ end
+ end
+ end
+
+ context "when not using Puma" do
+ before do
+ stub_database_config(pool_size: 7)
+ end
+
+ it "does nothing" do
+ expect { subject }.not_to change { Gitlab::Database.config['pool'] }
+ end
+ end
+
+ def stub_database_config(pool_size:)
+ config = {
+ 'adapter' => 'postgresql',
+ 'host' => 'db.host.com',
+ 'pool' => pool_size
+ }.compact
+
+ allow(Gitlab::Database).to receive(:config).and_return(config)
+ end
+end
diff --git a/spec/initializers/direct_upload_support_spec.rb b/spec/initializers/direct_upload_support_spec.rb
index e51d404e030..4b3fe871cef 100644
--- a/spec/initializers/direct_upload_support_spec.rb
+++ b/spec/initializers/direct_upload_support_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'Direct upload support' do
diff --git a/spec/initializers/doorkeeper_spec.rb b/spec/initializers/doorkeeper_spec.rb
index 1a78196e33d..47c196cb3a3 100644
--- a/spec/initializers/doorkeeper_spec.rb
+++ b/spec/initializers/doorkeeper_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require_relative '../../config/initializers/doorkeeper'
diff --git a/spec/initializers/fog_google_https_private_urls_spec.rb b/spec/initializers/fog_google_https_private_urls_spec.rb
index 08346b71fee..8a0d7ad8f15 100644
--- a/spec/initializers/fog_google_https_private_urls_spec.rb
+++ b/spec/initializers/fog_google_https_private_urls_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'Fog::Storage::GoogleXML::File', :fog_requests do
diff --git a/spec/initializers/lograge_spec.rb b/spec/initializers/lograge_spec.rb
index c2c1960eeab..9267231390d 100644
--- a/spec/initializers/lograge_spec.rb
+++ b/spec/initializers/lograge_spec.rb
@@ -68,4 +68,52 @@ describe 'lograge', type: :request do
subject
end
end
+
+ context 'with a log subscriber' do
+ let(:subscriber) { Lograge::RequestLogSubscriber.new }
+
+ let(:event) do
+ ActiveSupport::Notifications::Event.new(
+ 'process_action.action_controller',
+ Time.now,
+ Time.now,
+ 2,
+ status: 200,
+ controller: 'HomeController',
+ action: 'index',
+ format: 'application/json',
+ method: 'GET',
+ path: '/home?foo=bar',
+ params: {},
+ db_runtime: 0.02,
+ view_runtime: 0.01
+ )
+ end
+
+ let(:log_output) { StringIO.new }
+ let(:logger) do
+ Logger.new(log_output).tap { |logger| logger.formatter = ->(_, _, _, msg) { msg } }
+ end
+
+ describe 'with an exception' do
+ let(:exception) { RuntimeError.new('bad request') }
+ let(:backtrace) { caller }
+
+ before do
+ allow(exception).to receive(:backtrace).and_return(backtrace)
+ event.payload[:exception_object] = exception
+ Lograge.logger = logger
+ end
+
+ it 'adds exception data to log' do
+ subscriber.process_action(event)
+
+ log_data = JSON.parse(log_output.string)
+
+ expect(log_data['exception']['class']).to eq('RuntimeError')
+ expect(log_data['exception']['message']).to eq('bad request')
+ expect(log_data['exception']['backtrace']).to eq(Gitlab::Profiler.clean_backtrace(backtrace))
+ end
+ end
+ end
end
diff --git a/spec/initializers/rest-client-hostname_override_spec.rb b/spec/initializers/rest-client-hostname_override_spec.rb
index 3707e001d41..90a0305c9a9 100644
--- a/spec/initializers/rest-client-hostname_override_spec.rb
+++ b/spec/initializers/rest-client-hostname_override_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'rest-client dns rebinding protection' do
diff --git a/spec/initializers/secret_token_spec.rb b/spec/initializers/secret_token_spec.rb
index 726ce07a2d1..c29f46e7779 100644
--- a/spec/initializers/secret_token_spec.rb
+++ b/spec/initializers/secret_token_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require_relative '../../config/initializers/01_secret_token'
diff --git a/spec/initializers/settings_spec.rb b/spec/initializers/settings_spec.rb
index 57f5adbbc40..6cb45b4c86b 100644
--- a/spec/initializers/settings_spec.rb
+++ b/spec/initializers/settings_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require_relative '../../config/initializers/1_settings' unless defined?(Settings)
diff --git a/spec/initializers/trusted_proxies_spec.rb b/spec/initializers/trusted_proxies_spec.rb
index 02a9446ad7b..a2bd0ff9f1c 100644
--- a/spec/initializers/trusted_proxies_spec.rb
+++ b/spec/initializers/trusted_proxies_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'trusted_proxies' do
diff --git a/spec/initializers/zz_metrics_spec.rb b/spec/initializers/zz_metrics_spec.rb
index 3eaccfe8d8b..b9a1919ceae 100644
--- a/spec/initializers/zz_metrics_spec.rb
+++ b/spec/initializers/zz_metrics_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'instrument_classes' do
diff --git a/spec/javascripts/boards/board_card_spec.js b/spec/javascripts/boards/board_card_spec.js
index 9f441ca319e..51433a58212 100644
--- a/spec/javascripts/boards/board_card_spec.js
+++ b/spec/javascripts/boards/board_card_spec.js
@@ -10,6 +10,7 @@ import eventHub from '~/boards/eventhub';
import '~/boards/models/label';
import '~/boards/models/assignee';
import '~/boards/models/list';
+import store from '~/boards/stores';
import boardsStore from '~/boards/stores/boards_store';
import boardCard from '~/boards/components/board_card.vue';
import { listObj, boardsMockInterceptor, mockBoardService } from './mock_data';
@@ -40,6 +41,7 @@ describe('Board card', () => {
list.issues[0].labels.push(label1);
vm = new BoardCardComp({
+ store,
propsData: {
list,
issue: list.issues[0],
diff --git a/spec/javascripts/boards/board_list_common_spec.js b/spec/javascripts/boards/board_list_common_spec.js
index cb337e4cc83..ada7589b795 100644
--- a/spec/javascripts/boards/board_list_common_spec.js
+++ b/spec/javascripts/boards/board_list_common_spec.js
@@ -10,11 +10,17 @@ import BoardList from '~/boards/components/board_list.vue';
import '~/boards/models/issue';
import '~/boards/models/list';
import { listObj, boardsMockInterceptor, mockBoardService } from './mock_data';
+import store from '~/boards/stores';
import boardsStore from '~/boards/stores/boards_store';
window.Sortable = Sortable;
-export default function createComponent({ done, listIssueProps = {}, componentProps = {} }) {
+export default function createComponent({
+ done,
+ listIssueProps = {},
+ componentProps = {},
+ listProps = {},
+}) {
const el = document.createElement('div');
document.body.appendChild(el);
@@ -24,7 +30,7 @@ export default function createComponent({ done, listIssueProps = {}, componentPr
boardsStore.create();
const BoardListComp = Vue.extend(BoardList);
- const list = new List(listObj);
+ const list = new List({ ...listObj, ...listProps });
const issue = new ListIssue({
title: 'Testing',
id: 1,
@@ -34,11 +40,14 @@ export default function createComponent({ done, listIssueProps = {}, componentPr
assignees: [],
...listIssueProps,
});
- list.issuesSize = 1;
+ if (!Object.prototype.hasOwnProperty.call(listProps, 'issuesSize')) {
+ list.issuesSize = 1;
+ }
list.issues.push(issue);
const component = new BoardListComp({
el,
+ store,
propsData: {
disabled: false,
list,
diff --git a/spec/javascripts/boards/board_list_spec.js b/spec/javascripts/boards/board_list_spec.js
index 6774a46ed58..37e96e97279 100644
--- a/spec/javascripts/boards/board_list_spec.js
+++ b/spec/javascripts/boards/board_list_spec.js
@@ -1,156 +1,210 @@
+/* global List */
+
import Vue from 'vue';
import eventHub from '~/boards/eventhub';
import createComponent from './board_list_common_spec';
+import waitForPromises from '../helpers/wait_for_promises';
+
+import '~/boards/models/list';
describe('Board list component', () => {
let mock;
let component;
+ let getIssues;
+ function generateIssues(compWrapper) {
+ for (let i = 1; i < 20; i += 1) {
+ const issue = Object.assign({}, compWrapper.list.issues[0]);
+ issue.id += i;
+ compWrapper.list.issues.push(issue);
+ }
+ }
- beforeEach(done => {
- ({ mock, component } = createComponent({ done }));
- });
+ describe('When Expanded', () => {
+ beforeEach(done => {
+ getIssues = spyOn(List.prototype, 'getIssues').and.returnValue(new Promise(() => {}));
+ ({ mock, component } = createComponent({ done }));
+ });
- afterEach(() => {
- mock.restore();
- });
+ afterEach(() => {
+ mock.restore();
+ component.$destroy();
+ });
- it('renders component', () => {
- expect(component.$el.classList.contains('board-list-component')).toBe(true);
- });
+ it('loads first page of issues', done => {
+ waitForPromises()
+ .then(() => {
+ expect(getIssues).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
- it('renders loading icon', done => {
- component.loading = true;
+ it('renders component', () => {
+ expect(component.$el.classList.contains('board-list-component')).toBe(true);
+ });
+
+ it('renders loading icon', done => {
+ component.loading = true;
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.board-list-loading')).not.toBeNull();
+ Vue.nextTick(() => {
+ expect(component.$el.querySelector('.board-list-loading')).not.toBeNull();
- done();
+ done();
+ });
});
- });
- it('renders issues', () => {
- expect(component.$el.querySelectorAll('.board-card').length).toBe(1);
- });
+ it('renders issues', () => {
+ expect(component.$el.querySelectorAll('.board-card').length).toBe(1);
+ });
- it('sets data attribute with issue id', () => {
- expect(component.$el.querySelector('.board-card').getAttribute('data-issue-id')).toBe('1');
- });
+ it('sets data attribute with issue id', () => {
+ expect(component.$el.querySelector('.board-card').getAttribute('data-issue-id')).toBe('1');
+ });
- it('shows new issue form', done => {
- component.toggleForm();
+ it('shows new issue form', done => {
+ component.toggleForm();
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.board-new-issue-form')).not.toBeNull();
+ Vue.nextTick(() => {
+ expect(component.$el.querySelector('.board-new-issue-form')).not.toBeNull();
- expect(component.$el.querySelector('.is-smaller')).not.toBeNull();
+ expect(component.$el.querySelector('.is-smaller')).not.toBeNull();
- done();
+ done();
+ });
});
- });
- it('shows new issue form after eventhub event', done => {
- eventHub.$emit(`hide-issue-form-${component.list.id}`);
+ it('shows new issue form after eventhub event', done => {
+ eventHub.$emit(`hide-issue-form-${component.list.id}`);
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.board-new-issue-form')).not.toBeNull();
+ Vue.nextTick(() => {
+ expect(component.$el.querySelector('.board-new-issue-form')).not.toBeNull();
- expect(component.$el.querySelector('.is-smaller')).not.toBeNull();
+ expect(component.$el.querySelector('.is-smaller')).not.toBeNull();
- done();
+ done();
+ });
});
- });
- it('does not show new issue form for closed list', done => {
- component.list.type = 'closed';
- component.toggleForm();
+ it('does not show new issue form for closed list', done => {
+ component.list.type = 'closed';
+ component.toggleForm();
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.board-new-issue-form')).toBeNull();
+ Vue.nextTick(() => {
+ expect(component.$el.querySelector('.board-new-issue-form')).toBeNull();
- done();
+ done();
+ });
});
- });
- it('shows count list item', done => {
- component.showCount = true;
+ it('shows count list item', done => {
+ component.showCount = true;
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.board-list-count')).not.toBeNull();
+ Vue.nextTick(() => {
+ expect(component.$el.querySelector('.board-list-count')).not.toBeNull();
- expect(component.$el.querySelector('.board-list-count').textContent.trim()).toBe(
- 'Showing all issues',
- );
+ expect(component.$el.querySelector('.board-list-count').textContent.trim()).toBe(
+ 'Showing all issues',
+ );
- done();
+ done();
+ });
});
- });
- it('sets data attribute with invalid id', done => {
- component.showCount = true;
+ it('sets data attribute with invalid id', done => {
+ component.showCount = true;
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.board-list-count').getAttribute('data-issue-id')).toBe(
- '-1',
- );
+ Vue.nextTick(() => {
+ expect(component.$el.querySelector('.board-list-count').getAttribute('data-issue-id')).toBe(
+ '-1',
+ );
- done();
+ done();
+ });
});
- });
- it('shows how many more issues to load', done => {
- component.showCount = true;
- component.list.issuesSize = 20;
+ it('shows how many more issues to load', done => {
+ component.showCount = true;
+ component.list.issuesSize = 20;
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.board-list-count').textContent.trim()).toBe(
- 'Showing 1 of 20 issues',
- );
+ Vue.nextTick(() => {
+ expect(component.$el.querySelector('.board-list-count').textContent.trim()).toBe(
+ 'Showing 1 of 20 issues',
+ );
- done();
+ done();
+ });
});
- });
-
- it('loads more issues after scrolling', done => {
- spyOn(component.list, 'nextPage');
- component.$refs.list.style.height = '100px';
- component.$refs.list.style.overflow = 'scroll';
- for (let i = 1; i < 20; i += 1) {
- const issue = Object.assign({}, component.list.issues[0]);
- issue.id += i;
- component.list.issues.push(issue);
- }
+ it('loads more issues after scrolling', done => {
+ spyOn(component.list, 'nextPage');
+ component.$refs.list.style.height = '100px';
+ component.$refs.list.style.overflow = 'scroll';
+ generateIssues(component);
+
+ Vue.nextTick(() => {
+ component.$refs.list.scrollTop = 20000;
+
+ waitForPromises()
+ .then(() => {
+ expect(component.list.nextPage).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
- Vue.nextTick(() => {
- component.$refs.list.scrollTop = 20000;
+ it('does not load issues if already loading', done => {
+ component.list.nextPage = spyOn(component.list, 'nextPage').and.returnValue(
+ new Promise(() => {}),
+ );
- setTimeout(() => {
- expect(component.list.nextPage).toHaveBeenCalled();
+ component.onScroll();
+ component.onScroll();
- done();
- });
+ waitForPromises()
+ .then(() => {
+ expect(component.list.nextPage).toHaveBeenCalledTimes(1);
+ })
+ .then(done)
+ .catch(done.fail);
});
- });
- it('does not load issues if already loading', () => {
- component.list.nextPage = spyOn(component.list, 'nextPage').and.returnValue(
- new Promise(() => {}),
- );
+ it('shows loading more spinner', done => {
+ component.showCount = true;
+ component.list.loadingMore = true;
- component.onScroll();
- component.onScroll();
+ Vue.nextTick(() => {
+ expect(component.$el.querySelector('.board-list-count .gl-spinner')).not.toBeNull();
- expect(component.list.nextPage).toHaveBeenCalledTimes(1);
+ done();
+ });
+ });
});
- it('shows loading more spinner', done => {
- component.showCount = true;
- component.list.loadingMore = true;
+ describe('When Collapsed', () => {
+ beforeEach(done => {
+ getIssues = spyOn(List.prototype, 'getIssues').and.returnValue(new Promise(() => {}));
+ ({ mock, component } = createComponent({
+ done,
+ listProps: { type: 'closed', collapsed: true, issuesSize: 50 },
+ }));
+ generateIssues(component);
+ component.scrollHeight = spyOn(component, 'scrollHeight').and.returnValue(0);
+ });
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.board-list-count .gl-spinner')).not.toBeNull();
+ afterEach(() => {
+ mock.restore();
+ component.$destroy();
+ });
- done();
+ it('does not load all issues', done => {
+ waitForPromises()
+ .then(() => {
+ // Initial getIssues from list constructor
+ expect(getIssues).toHaveBeenCalledTimes(1);
+ })
+ .then(done)
+ .catch(done.fail);
});
});
});
diff --git a/spec/javascripts/boards/components/boards_selector_spec.js b/spec/javascripts/boards/components/boards_selector_spec.js
index 473cc0612ea..d1f36a0a652 100644
--- a/spec/javascripts/boards/components/boards_selector_spec.js
+++ b/spec/javascripts/boards/components/boards_selector_spec.js
@@ -1,5 +1,4 @@
import Vue from 'vue';
-import BoardService from '~/boards/services/board_service';
import BoardsSelector from '~/boards/components/boards_selector.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { TEST_HOST } from 'spec/test_constants';
@@ -37,7 +36,6 @@ describe('BoardsSelector', () => {
bulkUpdatePath: '',
boardId: '',
});
- window.gl.boardService = new BoardService();
allBoardsResponse = Promise.resolve({
data: boards,
@@ -46,8 +44,8 @@ describe('BoardsSelector', () => {
data: recentBoards,
});
- spyOn(BoardService.prototype, 'allBoards').and.returnValue(allBoardsResponse);
- spyOn(BoardService.prototype, 'recentBoards').and.returnValue(recentBoardsResponse);
+ spyOn(boardsStore, 'allBoards').and.returnValue(allBoardsResponse);
+ spyOn(boardsStore, 'recentBoards').and.returnValue(recentBoardsResponse);
const Component = Vue.extend(BoardsSelector);
vm = mountComponent(
@@ -94,7 +92,6 @@ describe('BoardsSelector', () => {
afterEach(() => {
vm.$destroy();
- window.gl.boardService = undefined;
});
describe('filtering', () => {
diff --git a/spec/javascripts/boards/components/issue_time_estimate_spec.js b/spec/javascripts/boards/components/issue_time_estimate_spec.js
deleted file mode 100644
index de48e3f6091..00000000000
--- a/spec/javascripts/boards/components/issue_time_estimate_spec.js
+++ /dev/null
@@ -1,70 +0,0 @@
-import Vue from 'vue';
-import IssueTimeEstimate from '~/boards/components/issue_time_estimate.vue';
-import boardsStore from '~/boards/stores/boards_store';
-import mountComponent from '../../helpers/vue_mount_component_helper';
-
-describe('Issue Time Estimate component', () => {
- let vm;
-
- beforeEach(() => {
- boardsStore.create();
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('when limitToHours is false', () => {
- beforeEach(() => {
- boardsStore.timeTracking.limitToHours = false;
-
- const Component = Vue.extend(IssueTimeEstimate);
- vm = mountComponent(Component, {
- estimate: 374460,
- });
- });
-
- it('renders the correct time estimate', () => {
- expect(vm.$el.querySelector('time').textContent.trim()).toEqual('2w 3d 1m');
- });
-
- it('renders expanded time estimate in tooltip', () => {
- expect(vm.$el.querySelector('.js-issue-time-estimate').textContent).toContain(
- '2 weeks 3 days 1 minute',
- );
- });
-
- it('prevents tooltip xss', done => {
- const alertSpy = spyOn(window, 'alert');
- vm.estimate = 'Foo <script>alert("XSS")</script>';
-
- vm.$nextTick(() => {
- expect(alertSpy).not.toHaveBeenCalled();
- expect(vm.$el.querySelector('time').textContent.trim()).toEqual('0m');
- expect(vm.$el.querySelector('.js-issue-time-estimate').textContent).toContain('0m');
- done();
- });
- });
- });
-
- describe('when limitToHours is true', () => {
- beforeEach(() => {
- boardsStore.timeTracking.limitToHours = true;
-
- const Component = Vue.extend(IssueTimeEstimate);
- vm = mountComponent(Component, {
- estimate: 374460,
- });
- });
-
- it('renders the correct time estimate', () => {
- expect(vm.$el.querySelector('time').textContent.trim()).toEqual('104h 1m');
- });
-
- it('renders expanded time estimate in tooltip', () => {
- expect(vm.$el.querySelector('.js-issue-time-estimate').textContent).toContain(
- '104 hours 1 minute',
- );
- });
- });
-});
diff --git a/spec/javascripts/boards/issue_card_spec.js b/spec/javascripts/boards/issue_card_spec.js
deleted file mode 100644
index 314e051665e..00000000000
--- a/spec/javascripts/boards/issue_card_spec.js
+++ /dev/null
@@ -1,292 +0,0 @@
-/* global ListAssignee */
-/* global ListLabel */
-/* global ListIssue */
-
-import Vue from 'vue';
-
-import '~/boards/models/label';
-import '~/boards/models/assignee';
-import '~/boards/models/issue';
-import '~/boards/models/list';
-import IssueCardInner from '~/boards/components/issue_card_inner.vue';
-import { listObj } from './mock_data';
-
-describe('Issue card component', () => {
- const user = new ListAssignee({
- id: 1,
- name: 'testing 123',
- username: 'test',
- avatar: 'test_image',
- });
- const label1 = new ListLabel({
- id: 3,
- title: 'testing 123',
- color: 'blue',
- text_color: 'white',
- description: 'test',
- });
- let component;
- let issue;
- let list;
-
- beforeEach(() => {
- setFixtures('<div class="test-container"></div>');
-
- list = {
- ...listObj,
- type: 'label',
- };
- issue = new ListIssue({
- title: 'Testing',
- id: 1,
- iid: 1,
- confidential: false,
- labels: [list.label],
- assignees: [],
- reference_path: '#1',
- real_path: '/test/1',
- weight: 1,
- });
-
- component = new Vue({
- el: document.querySelector('.test-container'),
- components: {
- 'issue-card': IssueCardInner,
- },
- data() {
- return {
- list,
- issue,
- issueLinkBase: '/test',
- rootPath: '/',
- };
- },
- template: `
- <issue-card
- :issue="issue"
- :list="list"
- :issue-link-base="issueLinkBase"
- :root-path="rootPath"></issue-card>
- `,
- });
- });
-
- it('renders issue title', () => {
- expect(component.$el.querySelector('.board-card-title').textContent).toContain(issue.title);
- });
-
- it('includes issue base in link', () => {
- expect(component.$el.querySelector('.board-card-title a').getAttribute('href')).toContain(
- '/test',
- );
- });
-
- it('includes issue title on link', () => {
- expect(component.$el.querySelector('.board-card-title a').getAttribute('title')).toBe(
- issue.title,
- );
- });
-
- it('does not render confidential icon', () => {
- expect(component.$el.querySelector('.fa-eye-flash')).toBeNull();
- });
-
- it('renders confidential icon', done => {
- component.issue.confidential = true;
-
- Vue.nextTick(() => {
- expect(component.$el.querySelector('.confidential-icon')).not.toBeNull();
- done();
- });
- });
-
- it('renders issue ID with #', () => {
- expect(component.$el.querySelector('.board-card-number').textContent).toContain(`#${issue.id}`);
- });
-
- describe('assignee', () => {
- it('does not render assignee', () => {
- expect(component.$el.querySelector('.board-card-assignee .avatar')).toBeNull();
- });
-
- describe('exists', () => {
- beforeEach(done => {
- component.issue.assignees = [user];
-
- Vue.nextTick(() => done());
- });
-
- it('renders assignee', () => {
- expect(component.$el.querySelector('.board-card-assignee .avatar')).not.toBeNull();
- });
-
- it('sets title', () => {
- expect(component.$el.querySelector('.js-assignee-tooltip').textContent).toContain(
- `${user.name}`,
- );
- });
-
- it('sets users path', () => {
- expect(component.$el.querySelector('.board-card-assignee a').getAttribute('href')).toBe(
- '/test',
- );
- });
-
- it('renders avatar', () => {
- expect(component.$el.querySelector('.board-card-assignee img')).not.toBeNull();
- });
- });
-
- describe('assignee default avatar', () => {
- beforeEach(done => {
- component.issue.assignees = [
- new ListAssignee(
- {
- id: 1,
- name: 'testing 123',
- username: 'test',
- },
- 'default_avatar',
- ),
- ];
-
- Vue.nextTick(done);
- });
-
- it('displays defaults avatar if users avatar is null', () => {
- expect(component.$el.querySelector('.board-card-assignee img')).not.toBeNull();
- expect(component.$el.querySelector('.board-card-assignee img').getAttribute('src')).toBe(
- 'default_avatar?width=24',
- );
- });
- });
- });
-
- describe('multiple assignees', () => {
- beforeEach(done => {
- component.issue.assignees = [
- new ListAssignee({
- id: 2,
- name: 'user2',
- username: 'user2',
- avatar: 'test_image',
- }),
- new ListAssignee({
- id: 3,
- name: 'user3',
- username: 'user3',
- avatar: 'test_image',
- }),
- new ListAssignee({
- id: 4,
- name: 'user4',
- username: 'user4',
- avatar: 'test_image',
- }),
- ];
-
- Vue.nextTick(() => done());
- });
-
- it('renders all three assignees', () => {
- expect(component.$el.querySelectorAll('.board-card-assignee .avatar').length).toEqual(3);
- });
-
- describe('more than three assignees', () => {
- beforeEach(done => {
- component.issue.assignees.push(
- new ListAssignee({
- id: 5,
- name: 'user5',
- username: 'user5',
- avatar: 'test_image',
- }),
- );
-
- Vue.nextTick(() => done());
- });
-
- it('renders more avatar counter', () => {
- expect(
- component.$el.querySelector('.board-card-assignee .avatar-counter').innerText.trim(),
- ).toEqual('+2');
- });
-
- it('renders two assignees', () => {
- expect(component.$el.querySelectorAll('.board-card-assignee .avatar').length).toEqual(2);
- });
-
- it('renders 99+ avatar counter', done => {
- for (let i = 5; i < 104; i += 1) {
- const u = new ListAssignee({
- id: i,
- name: 'name',
- username: 'username',
- avatar: 'test_image',
- });
- component.issue.assignees.push(u);
- }
-
- Vue.nextTick(() => {
- expect(
- component.$el.querySelector('.board-card-assignee .avatar-counter').innerText.trim(),
- ).toEqual('99+');
- done();
- });
- });
- });
- });
-
- describe('labels', () => {
- beforeEach(done => {
- component.issue.addLabel(label1);
-
- Vue.nextTick(() => done());
- });
-
- it('does not render list label but renders all other labels', () => {
- expect(component.$el.querySelectorAll('.badge').length).toBe(1);
- });
-
- it('renders label', () => {
- const nodes = [];
- component.$el.querySelectorAll('.badge').forEach(label => {
- nodes.push(label.getAttribute('data-original-title'));
- });
-
- expect(nodes.includes(label1.description)).toBe(true);
- });
-
- it('sets label description as title', () => {
- expect(component.$el.querySelector('.badge').getAttribute('data-original-title')).toContain(
- label1.description,
- );
- });
-
- it('sets background color of button', () => {
- const nodes = [];
- component.$el.querySelectorAll('.badge').forEach(label => {
- nodes.push(label.style.backgroundColor);
- });
-
- expect(nodes.includes(label1.color)).toBe(true);
- });
-
- it('does not render label if label does not have an ID', done => {
- component.issue.addLabel(
- new ListLabel({
- title: 'closed',
- }),
- );
-
- Vue.nextTick()
- .then(() => {
- expect(component.$el.querySelectorAll('.badge').length).toBe(1);
- expect(component.$el.textContent).not.toContain('closed');
-
- done();
- })
- .catch(done.fail);
- });
- });
-});
diff --git a/spec/javascripts/bootstrap_jquery_spec.js b/spec/javascripts/bootstrap_jquery_spec.js
index 35340a3bc42..6957cf40301 100644
--- a/spec/javascripts/bootstrap_jquery_spec.js
+++ b/spec/javascripts/bootstrap_jquery_spec.js
@@ -1,5 +1,3 @@
-/* eslint-disable no-var */
-
import $ from 'jquery';
import '~/commons/bootstrap';
@@ -10,15 +8,13 @@ describe('Bootstrap jQuery extensions', function() {
});
it('adds the disabled attribute', function() {
- var $input;
- $input = $('input').first();
+ const $input = $('input').first();
$input.disable();
expect($input).toHaveAttr('disabled', 'disabled');
});
return it('adds the disabled class', function() {
- var $input;
- $input = $('input').first();
+ const $input = $('input').first();
$input.disable();
expect($input).toHaveClass('disabled');
@@ -30,15 +26,13 @@ describe('Bootstrap jQuery extensions', function() {
});
it('removes the disabled attribute', function() {
- var $input;
- $input = $('input').first();
+ const $input = $('input').first();
$input.enable();
expect($input).not.toHaveAttr('disabled');
});
return it('removes the disabled class', function() {
- var $input;
- $input = $('input').first();
+ const $input = $('input').first();
$input.enable();
expect($input).not.toHaveClass('disabled');
diff --git a/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js b/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js
index b2fe315f6c6..b53e30b6896 100644
--- a/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js
+++ b/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import AjaxFormVariableList from '~/ci_variable_list/ajax_variable_list';
-const VARIABLE_PATCH_ENDPOINT = 'http://test.host/frontend-fixtures/builds-project/variables';
+const VARIABLE_PATCH_ENDPOINT = 'http://test.host/frontend-fixtures/builds-project/-/variables';
const HIDE_CLASS = 'hide';
describe('AjaxFormVariableList', () => {
diff --git a/spec/javascripts/diffs/components/diff_file_spec.js b/spec/javascripts/diffs/components/diff_file_spec.js
index 3ca2d1dc934..6ffdb6ba85d 100644
--- a/spec/javascripts/diffs/components/diff_file_spec.js
+++ b/spec/javascripts/diffs/components/diff_file_spec.js
@@ -3,14 +3,15 @@ import DiffFileComponent from '~/diffs/components/diff_file.vue';
import { diffViewerModes, diffViewerErrors } from '~/ide/constants';
import { createStore } from 'ee_else_ce/mr_notes/stores';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import diffFileMockData from '../mock_data/diff_file';
+import diffFileMockDataReadable from '../mock_data/diff_file';
+import diffFileMockDataUnreadable from '../mock_data/diff_file_unreadable';
describe('DiffFile', () => {
let vm;
beforeEach(() => {
vm = createComponentWithStore(Vue.extend(DiffFileComponent), createStore(), {
- file: JSON.parse(JSON.stringify(diffFileMockData)),
+ file: JSON.parse(JSON.stringify(diffFileMockDataReadable)),
canCurrentUserFork: false,
}).$mount();
});
@@ -81,6 +82,24 @@ describe('DiffFile', () => {
});
});
+ it('should be collapsable for unreadable files', done => {
+ vm.$destroy();
+ vm = createComponentWithStore(Vue.extend(DiffFileComponent), createStore(), {
+ file: JSON.parse(JSON.stringify(diffFileMockDataUnreadable)),
+ canCurrentUserFork: false,
+ }).$mount();
+
+ vm.renderIt = false;
+ vm.isCollapsed = true;
+
+ vm.$nextTick(() => {
+ expect(vm.$el.innerText).toContain('This diff is collapsed');
+ expect(vm.$el.querySelectorAll('.js-click-to-expand').length).toEqual(1);
+
+ done();
+ });
+ });
+
it('should be collapsed for renamed files', done => {
vm.renderIt = true;
vm.isCollapsed = false;
@@ -184,5 +203,31 @@ describe('DiffFile', () => {
.then(done)
.catch(done.fail);
});
+
+ it('does not call handleLoadCollapsedDiff if collapsed changed & file is unreadable', done => {
+ vm.$destroy();
+ vm = createComponentWithStore(Vue.extend(DiffFileComponent), createStore(), {
+ file: JSON.parse(JSON.stringify(diffFileMockDataUnreadable)),
+ canCurrentUserFork: false,
+ }).$mount();
+
+ spyOn(vm, 'handleLoadCollapsedDiff');
+
+ vm.file.highlighted_diff_lines = undefined;
+ vm.file.parallel_diff_lines = [];
+ vm.isCollapsed = true;
+
+ vm.$nextTick()
+ .then(() => {
+ vm.isCollapsed = false;
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ expect(vm.handleLoadCollapsedDiff).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
});
});
diff --git a/spec/javascripts/diffs/mock_data/diff_file_unreadable.js b/spec/javascripts/diffs/mock_data/diff_file_unreadable.js
new file mode 100644
index 00000000000..8c2df45988e
--- /dev/null
+++ b/spec/javascripts/diffs/mock_data/diff_file_unreadable.js
@@ -0,0 +1,244 @@
+export default {
+ submodule: false,
+ submodule_link: null,
+ blob: {
+ id: '9e10516ca50788acf18c518a231914a21e5f16f7',
+ path: 'CHANGELOG',
+ name: 'CHANGELOG',
+ mode: '100644',
+ readable_text: false,
+ icon: 'file-text-o',
+ },
+ blob_path: 'CHANGELOG',
+ blob_name: 'CHANGELOG',
+ blob_icon: '<i aria-hidden="true" data-hidden="true" class="fa fa-file-text-o fa-fw"></i>',
+ file_hash: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a',
+ file_path: 'CHANGELOG',
+ new_file: false,
+ deleted_file: false,
+ renamed_file: false,
+ old_path: 'CHANGELOG',
+ new_path: 'CHANGELOG',
+ mode_changed: false,
+ a_mode: '100644',
+ b_mode: '100644',
+ text: true,
+ viewer: {
+ name: 'text',
+ error: null,
+ collapsed: false,
+ },
+ added_lines: 0,
+ removed_lines: 0,
+ diff_refs: {
+ base_sha: 'e63f41fe459e62e1228fcef60d7189127aeba95a',
+ start_sha: 'd9eaefe5a676b820c57ff18cf5b68316025f7962',
+ head_sha: 'c48ee0d1bf3b30453f5b32250ce03134beaa6d13',
+ },
+ content_sha: 'c48ee0d1bf3b30453f5b32250ce03134beaa6d13',
+ stored_externally: null,
+ external_storage: null,
+ old_path_html: 'CHANGELOG',
+ new_path_html: 'CHANGELOG',
+ edit_path: '/gitlab-org/gitlab-test/edit/spooky-stuff/CHANGELOG',
+ view_path: '/gitlab-org/gitlab-test/blob/spooky-stuff/CHANGELOG',
+ replaced_view_path: null,
+ collapsed: false,
+ renderIt: false,
+ too_large: false,
+ context_lines_path:
+ '/gitlab-org/gitlab-test/blob/c48ee0d1bf3b30453f5b32250ce03134beaa6d13/CHANGELOG/diff',
+ highlighted_diff_lines: [
+ {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1',
+ type: 'new',
+ old_line: null,
+ new_line: 1,
+ discussions: [],
+ text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
+ rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
+ meta_data: null,
+ },
+ {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2',
+ type: 'new',
+ old_line: null,
+ new_line: 2,
+ discussions: [],
+ text: '+<span id="LC2" class="line" lang="plaintext"></span>\n',
+ rich_text: '+<span id="LC2" class="line" lang="plaintext"></span>\n',
+ meta_data: null,
+ },
+ {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
+ type: null,
+ old_line: 1,
+ new_line: 3,
+ discussions: [],
+ text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ rich_text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ meta_data: null,
+ },
+ {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
+ type: null,
+ old_line: 2,
+ new_line: 4,
+ discussions: [],
+ text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
+ rich_text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
+ meta_data: null,
+ },
+ {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
+ type: null,
+ old_line: 3,
+ new_line: 5,
+ discussions: [],
+ text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ rich_text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ meta_data: null,
+ },
+ {
+ line_code: null,
+ type: 'match',
+ old_line: null,
+ new_line: null,
+ discussions: [],
+ text: '',
+ rich_text: '',
+ meta_data: {
+ old_pos: 3,
+ new_pos: 5,
+ },
+ },
+ ],
+ parallel_diff_lines: [
+ {
+ left: {
+ type: 'empty-cell',
+ },
+ right: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1',
+ type: 'new',
+ old_line: null,
+ new_line: 1,
+ discussions: [],
+ text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
+ rich_text: '<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
+ meta_data: null,
+ },
+ },
+ {
+ left: {
+ type: 'empty-cell',
+ },
+ right: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2',
+ type: 'new',
+ old_line: null,
+ new_line: 2,
+ discussions: [],
+ text: '+<span id="LC2" class="line" lang="plaintext"></span>\n',
+ rich_text: '<span id="LC2" class="line" lang="plaintext"></span>\n',
+ meta_data: null,
+ },
+ },
+ {
+ left: {
+ line_Code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
+ type: null,
+ old_line: 1,
+ new_line: 3,
+ discussions: [],
+ text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ meta_data: null,
+ },
+ right: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
+ type: null,
+ old_line: 1,
+ new_line: 3,
+ discussions: [],
+ text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ meta_data: null,
+ },
+ },
+ {
+ left: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
+ type: null,
+ old_line: 2,
+ new_line: 4,
+ discussions: [],
+ text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
+ rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
+ meta_data: null,
+ },
+ right: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
+ type: null,
+ old_line: 2,
+ new_line: 4,
+ discussions: [],
+ text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
+ rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
+ meta_data: null,
+ },
+ },
+ {
+ left: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
+ type: null,
+ old_line: 3,
+ new_line: 5,
+ discussions: [],
+ text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ meta_data: null,
+ },
+ right: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
+ type: null,
+ old_line: 3,
+ new_line: 5,
+ discussions: [],
+ text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ meta_data: null,
+ },
+ },
+ {
+ left: {
+ line_code: null,
+ type: 'match',
+ old_line: null,
+ new_line: null,
+ discussions: [],
+ text: '',
+ rich_text: '',
+ meta_data: {
+ old_pos: 3,
+ new_pos: 5,
+ },
+ },
+ right: {
+ line_code: null,
+ type: 'match',
+ old_line: null,
+ new_line: null,
+ discussions: [],
+ text: '',
+ rich_text: '',
+ meta_data: {
+ old_pos: 3,
+ new_pos: 5,
+ },
+ },
+ },
+ ],
+ discussions: [],
+ renderingLines: false,
+};
diff --git a/spec/javascripts/dropzone_input_spec.js b/spec/javascripts/dropzone_input_spec.js
index ef899612b08..125dcdb3763 100644
--- a/spec/javascripts/dropzone_input_spec.js
+++ b/spec/javascripts/dropzone_input_spec.js
@@ -13,54 +13,68 @@ const TEMPLATE = `<form class="gfm-form" data-uploads-path="${TEST_UPLOAD_PATH}"
</form>`;
describe('dropzone_input', () => {
- let form;
- let dropzone;
- let xhr;
- let oldXMLHttpRequest;
+ it('returns null when failed to initialize', () => {
+ const dropzone = dropzoneInput($('<form class="gfm-form"></form>'));
- beforeEach(() => {
- form = $(TEMPLATE);
+ expect(dropzone).toBeNull();
+ });
- dropzone = dropzoneInput(form);
+ it('returns valid dropzone when successfully initialize', () => {
+ const dropzone = dropzoneInput($(TEMPLATE));
- xhr = jasmine.createSpyObj(Object.keys(XMLHttpRequest.prototype));
- oldXMLHttpRequest = window.XMLHttpRequest;
- window.XMLHttpRequest = () => xhr;
+ expect(dropzone.version).toBeTruthy();
});
- afterEach(() => {
- window.XMLHttpRequest = oldXMLHttpRequest;
- });
+ describe('shows error message', () => {
+ let form;
+ let dropzone;
+ let xhr;
+ let oldXMLHttpRequest;
- it('shows error message, when AJAX fails with json', () => {
- xhr = {
- ...xhr,
- statusCode: 400,
- readyState: 4,
- responseText: JSON.stringify({ message: TEST_ERROR_MESSAGE }),
- getResponseHeader: () => 'application/json',
- };
+ beforeEach(() => {
+ form = $(TEMPLATE);
- dropzone.processFile(TEST_FILE);
+ dropzone = dropzoneInput(form);
- xhr.onload();
+ xhr = jasmine.createSpyObj(Object.keys(XMLHttpRequest.prototype));
+ oldXMLHttpRequest = window.XMLHttpRequest;
+ window.XMLHttpRequest = () => xhr;
+ });
- expect(form.find('.uploading-error-message').text()).toEqual(TEST_ERROR_MESSAGE);
- });
+ afterEach(() => {
+ window.XMLHttpRequest = oldXMLHttpRequest;
+ });
+
+ it('when AJAX fails with json', () => {
+ xhr = {
+ ...xhr,
+ statusCode: 400,
+ readyState: 4,
+ responseText: JSON.stringify({ message: TEST_ERROR_MESSAGE }),
+ getResponseHeader: () => 'application/json',
+ };
+
+ dropzone.processFile(TEST_FILE);
+
+ xhr.onload();
+
+ expect(form.find('.uploading-error-message').text()).toEqual(TEST_ERROR_MESSAGE);
+ });
- it('shows error message, when AJAX fails with text', () => {
- xhr = {
- ...xhr,
- statusCode: 400,
- readyState: 4,
- responseText: TEST_ERROR_MESSAGE,
- getResponseHeader: () => 'text/plain',
- };
+ it('when AJAX fails with text', () => {
+ xhr = {
+ ...xhr,
+ statusCode: 400,
+ readyState: 4,
+ responseText: TEST_ERROR_MESSAGE,
+ getResponseHeader: () => 'text/plain',
+ };
- dropzone.processFile(TEST_FILE);
+ dropzone.processFile(TEST_FILE);
- xhr.onload();
+ xhr.onload();
- expect(form.find('.uploading-error-message').text()).toEqual(TEST_ERROR_MESSAGE);
+ expect(form.find('.uploading-error-message').text()).toEqual(TEST_ERROR_MESSAGE);
+ });
});
});
diff --git a/spec/javascripts/frequent_items/components/app_spec.js b/spec/javascripts/frequent_items/components/app_spec.js
index 36dd8604d08..da0427d650a 100644
--- a/spec/javascripts/frequent_items/components/app_spec.js
+++ b/spec/javascripts/frequent_items/components/app_spec.js
@@ -247,7 +247,7 @@ describe('Frequent Items App Component', () => {
.then(() => {
expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(
- mockSearchedProjects.length,
+ mockSearchedProjects.data.length,
);
})
.then(done)
diff --git a/spec/javascripts/frequent_items/mock_data.js b/spec/javascripts/frequent_items/mock_data.js
index 3ca5b4c7446..7f7d7b1cdbf 100644
--- a/spec/javascripts/frequent_items/mock_data.js
+++ b/spec/javascripts/frequent_items/mock_data.js
@@ -68,7 +68,7 @@ export const mockFrequentGroups = [
},
];
-export const mockSearchedGroups = [mockRawGroup];
+export const mockSearchedGroups = { data: [mockRawGroup] };
export const mockProcessedSearchedGroups = [mockGroup];
export const mockProject = {
@@ -135,7 +135,7 @@ export const mockFrequentProjects = [
},
];
-export const mockSearchedProjects = [mockRawProject];
+export const mockSearchedProjects = { data: [mockRawProject] };
export const mockProcessedSearchedProjects = [mockProject];
export const unsortedFrequentItems = [
diff --git a/spec/javascripts/frequent_items/store/actions_spec.js b/spec/javascripts/frequent_items/store/actions_spec.js
index 0a8525e77d6..7b065b69cce 100644
--- a/spec/javascripts/frequent_items/store/actions_spec.js
+++ b/spec/javascripts/frequent_items/store/actions_spec.js
@@ -169,7 +169,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
it('should dispatch `receiveSearchedItemsSuccess`', done => {
- mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects);
+ mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects, {});
testAction(
actions.fetchSearchedItems,
@@ -178,7 +178,10 @@ describe('Frequent Items Dropdown Store Actions', () => {
[],
[
{ type: 'requestSearchedItems' },
- { type: 'receiveSearchedItemsSuccess', payload: mockSearchedProjects },
+ {
+ type: 'receiveSearchedItemsSuccess',
+ payload: { data: mockSearchedProjects, headers: {} },
+ },
],
done,
);
diff --git a/spec/javascripts/graphs/stat_graph_contributors_graph_spec.js b/spec/javascripts/graphs/stat_graph_contributors_graph_spec.js
deleted file mode 100644
index 563d134ca81..00000000000
--- a/spec/javascripts/graphs/stat_graph_contributors_graph_spec.js
+++ /dev/null
@@ -1,152 +0,0 @@
-/* eslint-disable jasmine/no-suite-dupes, vars-on-top, no-var */
-
-import { scaleLinear, scaleTime } from 'd3-scale';
-import { timeParse } from 'd3-time-format';
-import {
- ContributorsGraph,
- ContributorsMasterGraph,
-} from '~/pages/projects/graphs/show/stat_graph_contributors_graph';
-
-const d3 = { scaleLinear, scaleTime, timeParse };
-
-describe('ContributorsGraph', function() {
- describe('#set_x_domain', function() {
- it('set the x_domain', function() {
- ContributorsGraph.set_x_domain(20);
-
- expect(ContributorsGraph.prototype.x_domain).toEqual(20);
- });
- });
-
- describe('#set_y_domain', function() {
- it('sets the y_domain', function() {
- ContributorsGraph.set_y_domain([{ commits: 30 }]);
-
- expect(ContributorsGraph.prototype.y_domain).toEqual([0, 30]);
- });
- });
-
- describe('#init_x_domain', function() {
- it('sets the initial x_domain', function() {
- ContributorsGraph.init_x_domain([{ date: '2013-01-31' }, { date: '2012-01-31' }]);
-
- expect(ContributorsGraph.prototype.x_domain).toEqual(['2012-01-31', '2013-01-31']);
- });
- });
-
- describe('#init_y_domain', function() {
- it('sets the initial y_domain', function() {
- ContributorsGraph.init_y_domain([{ commits: 30 }]);
-
- expect(ContributorsGraph.prototype.y_domain).toEqual([0, 30]);
- });
- });
-
- describe('#init_domain', function() {
- it('calls init_x_domain and init_y_domain', function() {
- spyOn(ContributorsGraph, 'init_x_domain');
- spyOn(ContributorsGraph, 'init_y_domain');
- ContributorsGraph.init_domain();
-
- expect(ContributorsGraph.init_x_domain).toHaveBeenCalled();
- expect(ContributorsGraph.init_y_domain).toHaveBeenCalled();
- });
- });
-
- describe('#set_dates', function() {
- it('sets the dates', function() {
- ContributorsGraph.set_dates('2013-12-01');
-
- expect(ContributorsGraph.prototype.dates).toEqual('2013-12-01');
- });
- });
-
- describe('#set_x_domain', function() {
- it("sets the instance's x domain using the prototype's x_domain", function() {
- ContributorsGraph.prototype.x_domain = 20;
- var instance = new ContributorsGraph();
- instance.x = d3
- .scaleTime()
- .range([0, 100])
- .clamp(true);
- spyOn(instance.x, 'domain');
- instance.set_x_domain();
-
- expect(instance.x.domain).toHaveBeenCalledWith(20);
- });
- });
-
- describe('#set_y_domain', function() {
- it("sets the instance's y domain using the prototype's y_domain", function() {
- ContributorsGraph.prototype.y_domain = 30;
- var instance = new ContributorsGraph();
- instance.y = d3
- .scaleLinear()
- .range([100, 0])
- .nice();
- spyOn(instance.y, 'domain');
- instance.set_y_domain();
-
- expect(instance.y.domain).toHaveBeenCalledWith(30);
- });
- });
-
- describe('#set_domain', function() {
- it('calls set_x_domain and set_y_domain', function() {
- var instance = new ContributorsGraph();
- spyOn(instance, 'set_x_domain');
- spyOn(instance, 'set_y_domain');
- instance.set_domain();
-
- expect(instance.set_x_domain).toHaveBeenCalled();
- expect(instance.set_y_domain).toHaveBeenCalled();
- });
- });
-
- describe('#set_data', function() {
- it('sets the data', function() {
- var instance = new ContributorsGraph();
- instance.set_data('20');
-
- expect(instance.data).toEqual('20');
- });
- });
-});
-
-describe('ContributorsMasterGraph', function() {
- // TODO: fix or remove
- // describe("#process_dates", function () {
- // it("gets and parses dates", function () {
- // var graph = new ContributorsMasterGraph();
- // var data = 'random data here';
- // spyOn(graph, 'parse_dates');
- // spyOn(graph, 'get_dates').andReturn("get");
- // spyOn(ContributorsGraph,'set_dates').andCallThrough();
- // graph.process_dates(data);
- // expect(graph.parse_dates).toHaveBeenCalledWith(data);
- // expect(graph.get_dates).toHaveBeenCalledWith(data);
- // expect(ContributorsGraph.set_dates).toHaveBeenCalledWith("get");
- // });
- // });
-
- describe('#get_dates', function() {
- it('plucks the date field from data collection', function() {
- var graph = new ContributorsMasterGraph();
- var data = [{ date: '2013-01-01' }, { date: '2012-12-15' }];
-
- expect(graph.get_dates(data)).toEqual(['2013-01-01', '2012-12-15']);
- });
- });
-
- describe('#parse_dates', function() {
- it('parses the dates', function() {
- var graph = new ContributorsMasterGraph();
- var parseDate = d3.timeParse('%Y-%m-%d');
- var data = [{ date: '2013-01-01' }, { date: '2012-12-15' }];
- var correct = [{ date: parseDate(data[0].date) }, { date: parseDate(data[1].date) }];
- graph.parse_dates(data);
-
- expect(data).toEqual(correct);
- });
- });
-});
diff --git a/spec/javascripts/graphs/stat_graph_contributors_spec.js b/spec/javascripts/graphs/stat_graph_contributors_spec.js
deleted file mode 100644
index 2ebb6845a8b..00000000000
--- a/spec/javascripts/graphs/stat_graph_contributors_spec.js
+++ /dev/null
@@ -1,28 +0,0 @@
-import ContributorsStatGraph from '~/pages/projects/graphs/show/stat_graph_contributors';
-import { ContributorsGraph } from '~/pages/projects/graphs/show/stat_graph_contributors_graph';
-
-import { setLanguage } from '../helpers/locale_helper';
-
-describe('ContributorsStatGraph', () => {
- describe('change_date_header', () => {
- beforeAll(() => {
- setLanguage('de');
- });
-
- afterAll(() => {
- setLanguage(null);
- });
-
- it('uses the locale to display date ranges', () => {
- ContributorsGraph.init_x_domain([{ date: '2013-01-31' }, { date: '2012-01-31' }]);
- setFixtures('<div id="date_header"></div>');
- const graph = new ContributorsStatGraph();
-
- graph.change_date_header();
-
- expect(document.getElementById('date_header').innerText).toBe(
- '31. Januar 2012 – 31. Januar 2013',
- );
- });
- });
-});
diff --git a/spec/javascripts/graphs/stat_graph_contributors_util_spec.js b/spec/javascripts/graphs/stat_graph_contributors_util_spec.js
deleted file mode 100644
index 511b660c671..00000000000
--- a/spec/javascripts/graphs/stat_graph_contributors_util_spec.js
+++ /dev/null
@@ -1,298 +0,0 @@
-/* eslint-disable no-var, camelcase, vars-on-top */
-
-import ContributorsStatGraphUtil from '~/pages/projects/graphs/show/stat_graph_contributors_util';
-
-describe('ContributorsStatGraphUtil', function() {
- describe('#parse_log', function() {
- it('returns a correctly parsed log', function() {
- var fake_log = [
- {
- author_email: 'karlo@email.com',
- author_name: 'Karlo Soriano',
- date: '2013-05-09',
- additions: 471,
- },
- {
- author_email: 'dzaporozhets@email.com',
- author_name: 'Dmitriy Zaporozhets',
- date: '2013-05-08',
- additions: 6,
- deletions: 1,
- },
- {
- author_email: 'dzaporozhets@email.com',
- author_name: 'Dmitriy Zaporozhets',
- date: '2013-05-08',
- additions: 19,
- deletions: 3,
- },
- {
- author_email: 'dzaporozhets@email.com',
- author_name: 'Dmitriy Zaporozhets',
- date: '2013-05-08',
- additions: 29,
- deletions: 3,
- },
- ];
-
- var correct_parsed_log = {
- total: [
- { date: '2013-05-09', additions: 471, deletions: 0, commits: 1 },
- { date: '2013-05-08', additions: 54, deletions: 7, commits: 3 },
- ],
- by_author: [
- {
- author_name: 'Karlo Soriano',
- author_email: 'karlo@email.com',
- '2013-05-09': { date: '2013-05-09', additions: 471, deletions: 0, commits: 1 },
- },
- {
- author_name: 'Dmitriy Zaporozhets',
- author_email: 'dzaporozhets@email.com',
- '2013-05-08': { date: '2013-05-08', additions: 54, deletions: 7, commits: 3 },
- },
- ],
- };
-
- expect(ContributorsStatGraphUtil.parse_log(fake_log)).toEqual(correct_parsed_log);
- });
- });
-
- describe('#store_data', function() {
- var fake_entry = { author: 'Karlo Soriano', date: '2013-05-09', additions: 471 };
- var fake_total = {};
- var fake_by_author = {};
-
- it('calls #store_commits', function() {
- spyOn(ContributorsStatGraphUtil, 'store_commits');
- ContributorsStatGraphUtil.store_data(fake_entry, fake_total, fake_by_author);
-
- expect(ContributorsStatGraphUtil.store_commits).toHaveBeenCalled();
- });
-
- it('calls #store_additions', function() {
- spyOn(ContributorsStatGraphUtil, 'store_additions');
- ContributorsStatGraphUtil.store_data(fake_entry, fake_total, fake_by_author);
-
- expect(ContributorsStatGraphUtil.store_additions).toHaveBeenCalled();
- });
-
- it('calls #store_deletions', function() {
- spyOn(ContributorsStatGraphUtil, 'store_deletions');
- ContributorsStatGraphUtil.store_data(fake_entry, fake_total, fake_by_author);
-
- expect(ContributorsStatGraphUtil.store_deletions).toHaveBeenCalled();
- });
- });
-
- // TODO: fix or remove
- // describe("#store_commits", function () {
- // var fake_total = "fake_total";
- // var fake_by_author = "fake_by_author";
- //
- // it("calls #add twice with arguments fake_total and fake_by_author respectively", function () {
- // spyOn(ContributorsStatGraphUtil, 'add');
- // ContributorsStatGraphUtil.store_commits(fake_total, fake_by_author);
- // expect(ContributorsStatGraphUtil.add.argsForCall).toEqual([["fake_total", "commits", 1], ["fake_by_author", "commits", 1]]);
- // });
- // });
-
- describe('#add', function() {
- it('adds 1 to current test_field in collection', function() {
- var fake_collection = { test_field: 10 };
- ContributorsStatGraphUtil.add(fake_collection, 'test_field', 1);
-
- expect(fake_collection.test_field).toEqual(11);
- });
-
- it('inits and adds 1 if test_field in collection is not defined', function() {
- var fake_collection = {};
- ContributorsStatGraphUtil.add(fake_collection, 'test_field', 1);
-
- expect(fake_collection.test_field).toEqual(1);
- });
- });
-
- // TODO: fix or remove
- // describe("#store_additions", function () {
- // var fake_entry = {additions: 10};
- // var fake_total= "fake_total";
- // var fake_by_author = "fake_by_author";
- // it("calls #add twice with arguments fake_total and fake_by_author respectively", function () {
- // spyOn(ContributorsStatGraphUtil, 'add');
- // ContributorsStatGraphUtil.store_additions(fake_entry, fake_total, fake_by_author);
- // expect(ContributorsStatGraphUtil.add.argsForCall).toEqual([["fake_total", "additions", 10], ["fake_by_author", "additions", 10]]);
- // });
- // });
-
- // TODO: fix or remove
- // describe("#store_deletions", function () {
- // var fake_entry = {deletions: 10};
- // var fake_total= "fake_total";
- // var fake_by_author = "fake_by_author";
- // it("calls #add twice with arguments fake_total and fake_by_author respectively", function () {
- // spyOn(ContributorsStatGraphUtil, 'add');
- // ContributorsStatGraphUtil.store_deletions(fake_entry, fake_total, fake_by_author);
- // expect(ContributorsStatGraphUtil.add.argsForCall).toEqual([["fake_total", "deletions", 10], ["fake_by_author", "deletions", 10]]);
- // });
- // });
-
- describe('#add_date', function() {
- it('adds a date field to the collection', function() {
- var fake_date = '2013-10-02';
- var fake_collection = {};
- ContributorsStatGraphUtil.add_date(fake_date, fake_collection);
-
- expect(fake_collection[fake_date].date).toEqual('2013-10-02');
- });
- });
-
- describe('#add_author', function() {
- it('adds an author field to the collection', function() {
- var fake_author = { author_name: 'Author', author_email: 'fake@email.com' };
- var fake_author_collection = {};
- var fake_email_collection = {};
- ContributorsStatGraphUtil.add_author(
- fake_author,
- fake_author_collection,
- fake_email_collection,
- );
-
- expect(fake_author_collection[fake_author.author_name].author_name).toEqual('Author');
- expect(fake_email_collection[fake_author.author_email].author_name).toEqual('Author');
- });
- });
-
- describe('#get_total_data', function() {
- it('returns the collection sorted via specified field', function() {
- var fake_parsed_log = {
- total: [
- { date: '2013-05-09', additions: 471, deletions: 0, commits: 1 },
- { date: '2013-05-08', additions: 54, deletions: 7, commits: 3 },
- ],
- by_author: [
- {
- author: 'Karlo Soriano',
- '2013-05-09': { date: '2013-05-09', additions: 471, deletions: 0, commits: 1 },
- },
- {
- author: 'Dmitriy Zaporozhets',
- '2013-05-08': { date: '2013-05-08', additions: 54, deletions: 7, commits: 3 },
- },
- ],
- };
- var correct_total_data = [
- { date: '2013-05-08', commits: 3 },
- { date: '2013-05-09', commits: 1 },
- ];
-
- expect(ContributorsStatGraphUtil.get_total_data(fake_parsed_log, 'commits')).toEqual(
- correct_total_data,
- );
- });
- });
-
- describe('#pick_field', function() {
- it('returns the collection with only the specified field and date', function() {
- var fake_parsed_log_total = [
- { date: '2013-05-09', additions: 471, deletions: 0, commits: 1 },
- { date: '2013-05-08', additions: 54, deletions: 7, commits: 3 },
- ];
- ContributorsStatGraphUtil.pick_field(fake_parsed_log_total, 'commits');
- var correct_pick_field_data = [
- { date: '2013-05-09', commits: 1 },
- { date: '2013-05-08', commits: 3 },
- ];
-
- expect(ContributorsStatGraphUtil.pick_field(fake_parsed_log_total, 'commits')).toEqual(
- correct_pick_field_data,
- );
- });
- });
-
- describe('#get_author_data', function() {
- it('returns the log by author sorted by specified field', function() {
- var fake_parsed_log = {
- total: [
- { date: '2013-05-09', additions: 471, deletions: 0, commits: 1 },
- { date: '2013-05-08', additions: 54, deletions: 7, commits: 3 },
- ],
- by_author: [
- {
- author_name: 'Karlo Soriano',
- author_email: 'karlo@email.com',
- '2013-05-09': { date: '2013-05-09', additions: 471, deletions: 0, commits: 1 },
- },
- {
- author_name: 'Dmitriy Zaporozhets',
- author_email: 'dzaporozhets@email.com',
- '2013-05-08': { date: '2013-05-08', additions: 54, deletions: 7, commits: 3 },
- },
- ],
- };
- var correct_author_data = [
- {
- author_name: 'Dmitriy Zaporozhets',
- author_email: 'dzaporozhets@email.com',
- dates: { '2013-05-08': 3 },
- deletions: 7,
- additions: 54,
- commits: 3,
- },
- {
- author_name: 'Karlo Soriano',
- author_email: 'karlo@email.com',
- dates: { '2013-05-09': 1 },
- deletions: 0,
- additions: 471,
- commits: 1,
- },
- ];
-
- expect(ContributorsStatGraphUtil.get_author_data(fake_parsed_log, 'commits')).toEqual(
- correct_author_data,
- );
- });
- });
-
- describe('#parse_log_entry', function() {
- it('adds the corresponding info from the log entry to the author', function() {
- var fake_log_entry = {
- author_name: 'Karlo Soriano',
- author_email: 'karlo@email.com',
- '2013-05-09': { date: '2013-05-09', additions: 471, deletions: 0, commits: 1 },
- };
- var correct_parsed_log = {
- author_name: 'Karlo Soriano',
- author_email: 'karlo@email.com',
- dates: { '2013-05-09': 1 },
- deletions: 0,
- additions: 471,
- commits: 1,
- };
-
- expect(ContributorsStatGraphUtil.parse_log_entry(fake_log_entry, 'commits', null)).toEqual(
- correct_parsed_log,
- );
- });
- });
-
- describe('#in_range', function() {
- var date = '2013-05-09';
- it('returns true if date_range is null', function() {
- expect(ContributorsStatGraphUtil.in_range(date, null)).toEqual(true);
- });
-
- it('returns true if date is in range', function() {
- var date_range = [new Date('2013-01-01'), new Date('2013-12-12')];
-
- expect(ContributorsStatGraphUtil.in_range(date, date_range)).toEqual(true);
- });
-
- it('returns false if date is not in range', function() {
- var date_range = [new Date('1999-12-01'), new Date('2000-12-01')];
-
- expect(ContributorsStatGraphUtil.in_range(date, date_range)).toEqual(false);
- });
- });
-});
diff --git a/spec/javascripts/ide/components/jobs/stage_spec.js b/spec/javascripts/ide/components/jobs/stage_spec.js
deleted file mode 100644
index fc3831f2d05..00000000000
--- a/spec/javascripts/ide/components/jobs/stage_spec.js
+++ /dev/null
@@ -1,95 +0,0 @@
-import Vue from 'vue';
-import Stage from '~/ide/components/jobs/stage.vue';
-import { stages, jobs } from '../../mock_data';
-
-describe('IDE pipeline stage', () => {
- const Component = Vue.extend(Stage);
- let vm;
- let stage;
-
- beforeEach(() => {
- stage = {
- ...stages[0],
- id: 0,
- dropdownPath: stages[0].dropdown_path,
- jobs: [...jobs],
- isLoading: false,
- isCollapsed: false,
- };
-
- vm = new Component({
- propsData: { stage },
- });
-
- spyOn(vm, '$emit');
-
- vm.$mount();
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('emits fetch event when mounted', () => {
- expect(vm.$emit).toHaveBeenCalledWith('fetch', vm.stage);
- });
-
- it('renders stages details', () => {
- expect(vm.$el.textContent).toContain(vm.stage.name);
- });
-
- it('renders CI icon', () => {
- expect(vm.$el.querySelector('.ic-status_failed')).not.toBe(null);
- });
-
- describe('collapsed', () => {
- it('emits event when clicking header', done => {
- vm.$el.querySelector('.card-header').click();
-
- vm.$nextTick(() => {
- expect(vm.$emit).toHaveBeenCalledWith('toggleCollapsed', vm.stage.id);
-
- done();
- });
- });
-
- it('toggles collapse status when collapsed', done => {
- vm.stage.isCollapsed = true;
-
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.card-body').style.display).toBe('none');
-
- done();
- });
- });
-
- it('sets border bottom class when collapsed', done => {
- vm.stage.isCollapsed = true;
-
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.card-header').classList).toContain('border-bottom-0');
-
- done();
- });
- });
- });
-
- it('renders jobs count', () => {
- expect(vm.$el.querySelector('.badge').textContent).toContain('4');
- });
-
- it('renders loading icon when no jobs and isLoading is true', done => {
- vm.stage.isLoading = true;
- vm.stage.jobs = [];
-
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.loading-container')).not.toBe(null);
-
- done();
- });
- });
-
- it('renders list of jobs', () => {
- expect(vm.$el.querySelectorAll('.ide-job-item').length).toBe(4);
- });
-});
diff --git a/spec/javascripts/ide/components/repo_editor_spec.js b/spec/javascripts/ide/components/repo_editor_spec.js
index d1b43df74b9..21fb5449858 100644
--- a/spec/javascripts/ide/components/repo_editor_spec.js
+++ b/spec/javascripts/ide/components/repo_editor_spec.js
@@ -261,10 +261,10 @@ describe('RepoEditor', () => {
});
it('updates state when model content changed', done => {
- vm.model.setValue('testing 123');
+ vm.model.setValue('testing 123\n');
setTimeout(() => {
- expect(vm.file.content).toBe('testing 123');
+ expect(vm.file.content).toBe('testing 123\n');
done();
});
diff --git a/spec/javascripts/ide/stores/actions/file_spec.js b/spec/javascripts/ide/stores/actions/file_spec.js
index 021c3076094..03d1125c23a 100644
--- a/spec/javascripts/ide/stores/actions/file_spec.js
+++ b/spec/javascripts/ide/stores/actions/file_spec.js
@@ -182,13 +182,25 @@ describe('IDE store file actions', () => {
spyOn(service, 'getFileData').and.callThrough();
localFile = file(`newCreate-${Math.random()}`);
- localFile.url = `project/getFileDataURL`;
store.state.entries[localFile.path] = localFile;
+
+ store.state.currentProjectId = 'test/test';
+ store.state.currentBranchId = 'master';
+
+ store.state.projects['test/test'] = {
+ branches: {
+ master: {
+ commit: {
+ id: '7297abc',
+ },
+ },
+ },
+ };
});
describe('success', () => {
beforeEach(() => {
- mock.onGet(`${RELATIVE_URL_ROOT}/project/getFileDataURL`).replyOnce(
+ mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).replyOnce(
200,
{
blame_path: 'blame_path',
@@ -210,7 +222,7 @@ describe('IDE store file actions', () => {
.dispatch('getFileData', { path: localFile.path })
.then(() => {
expect(service.getFileData).toHaveBeenCalledWith(
- `${RELATIVE_URL_ROOT}/project/getFileDataURL`,
+ `${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`,
);
done();
@@ -229,12 +241,11 @@ describe('IDE store file actions', () => {
.catch(done.fail);
});
- it('sets document title', done => {
+ it('sets document title with the branchId', done => {
store
.dispatch('getFileData', { path: localFile.path })
.then(() => {
- expect(document.title).toBe('testing getFileData');
-
+ expect(document.title).toBe(`${localFile.path} · master · test/test · GitLab`);
done();
})
.catch(done.fail);
@@ -283,7 +294,7 @@ describe('IDE store file actions', () => {
localFile.path = 'new-shiny-file';
store.state.entries[localFile.path] = localFile;
- mock.onGet(`${RELATIVE_URL_ROOT}/project/getFileDataURL`).replyOnce(
+ mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/old-dull-file`).replyOnce(
200,
{
blame_path: 'blame_path',
@@ -304,7 +315,7 @@ describe('IDE store file actions', () => {
store
.dispatch('getFileData', { path: localFile.path })
.then(() => {
- expect(document.title).toBe('testing new-shiny-file');
+ expect(document.title).toBe(`new-shiny-file · master · test/test · GitLab`);
done();
})
@@ -314,14 +325,17 @@ describe('IDE store file actions', () => {
describe('error', () => {
beforeEach(() => {
- mock.onGet(`project/getFileDataURL`).networkError();
+ mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).networkError();
});
it('dispatches error action', done => {
const dispatch = jasmine.createSpy('dispatch');
actions
- .getFileData({ state: store.state, commit() {}, dispatch }, { path: localFile.path })
+ .getFileData(
+ { state: store.state, commit() {}, dispatch, getters: store.getters },
+ { path: localFile.path },
+ )
.then(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
text: 'An error occurred whilst loading the file.',
@@ -455,6 +469,8 @@ describe('IDE store file actions', () => {
beforeEach(() => {
tmpFile = file('tmpFile');
+ tmpFile.content = '\n';
+ tmpFile.raw = '\n';
store.state.entries[tmpFile.path] = tmpFile;
});
@@ -462,10 +478,24 @@ describe('IDE store file actions', () => {
store
.dispatch('changeFileContent', {
path: tmpFile.path,
+ content: 'content\n',
+ })
+ .then(() => {
+ expect(tmpFile.content).toBe('content\n');
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('adds a newline to the end of the file if it doesnt already exist', done => {
+ store
+ .dispatch('changeFileContent', {
+ path: tmpFile.path,
content: 'content',
})
.then(() => {
- expect(tmpFile.content).toBe('content');
+ expect(tmpFile.content).toBe('content\n');
done();
})
@@ -510,12 +540,12 @@ describe('IDE store file actions', () => {
store
.dispatch('changeFileContent', {
path: tmpFile.path,
- content: 'content',
+ content: 'content\n',
})
.then(() =>
store.dispatch('changeFileContent', {
path: tmpFile.path,
- content: '',
+ content: '\n',
}),
)
.then(() => {
diff --git a/spec/javascripts/ide/stores/actions/merge_request_spec.js b/spec/javascripts/ide/stores/actions/merge_request_spec.js
index 4dd0c1150eb..a8894c644be 100644
--- a/spec/javascripts/ide/stores/actions/merge_request_spec.js
+++ b/spec/javascripts/ide/stores/actions/merge_request_spec.js
@@ -356,8 +356,30 @@ describe('IDE store merge request actions', () => {
changes: [],
};
store.state.entries = {
- foo: {},
- bar: {},
+ foo: {
+ type: 'blob',
+ },
+ bar: {
+ type: 'blob',
+ },
+ };
+
+ store.state.currentProjectId = 'test/test';
+ store.state.currentBranchId = 'master';
+
+ store.state.projects['test/test'] = {
+ branches: {
+ master: {
+ commit: {
+ id: '7297abc',
+ },
+ },
+ abcbranch: {
+ commit: {
+ id: '29020fc',
+ },
+ },
+ },
};
const originalDispatch = store.dispatch;
@@ -415,9 +437,11 @@ describe('IDE store merge request actions', () => {
it('updates activity bar view and gets file data, if changes are found', done => {
store.state.entries.foo = {
url: 'test',
+ type: 'blob',
};
store.state.entries.bar = {
url: 'test',
+ type: 'blob',
};
testMergeRequestChanges.changes = [
diff --git a/spec/javascripts/ide/stores/actions/tree_spec.js b/spec/javascripts/ide/stores/actions/tree_spec.js
index 0c3c4147501..e2d8cc195ae 100644
--- a/spec/javascripts/ide/stores/actions/tree_spec.js
+++ b/spec/javascripts/ide/stores/actions/tree_spec.js
@@ -31,7 +31,10 @@ describe('Multi-file store tree actions', () => {
web_url: '',
branches: {
master: {
- workingReference: '1',
+ workingReference: '12345678',
+ commit: {
+ id: '12345678',
+ },
},
},
};
@@ -61,7 +64,7 @@ describe('Multi-file store tree actions', () => {
store
.dispatch('getFiles', basicCallParameters)
.then(() => {
- expect(service.getFiles).toHaveBeenCalledWith('', 'master');
+ expect(service.getFiles).toHaveBeenCalledWith('', '12345678');
done();
})
@@ -99,8 +102,18 @@ describe('Multi-file store tree actions', () => {
store.state.projects = {
'abc/def': {
web_url: `${gl.TEST_HOST}/files`,
+ branches: {
+ 'master-testing': {
+ commit: {
+ id: '12345',
+ },
+ },
+ },
},
};
+ const getters = {
+ findBranch: () => store.state.projects['abc/def'].branches['master-testing'],
+ };
mock.onGet(/(.*)/).replyOnce(500);
@@ -109,6 +122,7 @@ describe('Multi-file store tree actions', () => {
commit() {},
dispatch,
state: store.state,
+ getters,
},
{
projectId: 'abc/def',
diff --git a/spec/javascripts/ide/stores/getters_spec.js b/spec/javascripts/ide/stores/getters_spec.js
index 73a8d993a13..558674cc845 100644
--- a/spec/javascripts/ide/stores/getters_spec.js
+++ b/spec/javascripts/ide/stores/getters_spec.js
@@ -163,20 +163,57 @@ describe('IDE store getters', () => {
describe('currentBranch', () => {
it('returns current projects branch', () => {
- const localGetters = {
- currentProject: {
- branches: {
- master: {
- name: 'master',
- },
+ localState.currentProjectId = 'abcproject';
+ localState.currentBranchId = 'master';
+ localState.projects.abcproject = {
+ name: 'abcproject',
+ branches: {
+ master: {
+ name: 'master',
},
},
};
+ const localGetters = {
+ findBranch: jasmine.createSpy('findBranchSpy'),
+ };
+ getters.currentBranch(localState, localGetters);
+
+ expect(localGetters.findBranch).toHaveBeenCalledWith('abcproject', 'master');
+ });
+ });
+
+ describe('findProject', () => {
+ it('returns the project matching the id', () => {
+ localState.currentProjectId = 'abcproject';
+ localState.projects.abcproject = {
+ name: 'abcproject',
+ };
+
+ expect(getters.findProject(localState)('abcproject').name).toBe('abcproject');
+ });
+ });
+
+ describe('findBranch', () => {
+ let result;
+
+ it('returns the selected branch from a project', () => {
+ localState.currentProjectId = 'abcproject';
localState.currentBranchId = 'master';
+ localState.projects.abcproject = {
+ name: 'abcproject',
+ branches: {
+ master: {
+ name: 'master',
+ },
+ },
+ };
+ const localGetters = {
+ findProject: () => localState.projects.abcproject,
+ };
- expect(getters.currentBranch(localState, localGetters)).toEqual({
- name: 'master',
- });
+ result = getters.findBranch(localState, localGetters)('abcproject', 'master');
+
+ expect(result.name).toBe('master');
});
});
diff --git a/spec/javascripts/ide/stores/modules/commit/actions_spec.js b/spec/javascripts/ide/stores/modules/commit/actions_spec.js
index 95d927065f0..d464f30b947 100644
--- a/spec/javascripts/ide/stores/modules/commit/actions_spec.js
+++ b/spec/javascripts/ide/stores/modules/commit/actions_spec.js
@@ -292,6 +292,8 @@ describe('IDE commit module actions', () => {
type: 'blob',
active: true,
lastCommitSha: TEST_COMMIT_SHA,
+ content: '\n',
+ raw: '\n',
};
Object.assign(store.state, {
@@ -359,7 +361,7 @@ describe('IDE commit module actions', () => {
{
action: commitActionTypes.update,
file_path: jasmine.anything(),
- content: undefined,
+ content: '\n',
encoding: jasmine.anything(),
last_commit_id: undefined,
previous_path: undefined,
@@ -386,7 +388,7 @@ describe('IDE commit module actions', () => {
{
action: commitActionTypes.update,
file_path: jasmine.anything(),
- content: undefined,
+ content: '\n',
encoding: jasmine.anything(),
last_commit_id: TEST_COMMIT_SHA,
previous_path: undefined,
diff --git a/spec/javascripts/ide/stores/utils_spec.js b/spec/javascripts/ide/stores/utils_spec.js
index a477d4fc200..37290864e3d 100644
--- a/spec/javascripts/ide/stores/utils_spec.js
+++ b/spec/javascripts/ide/stores/utils_spec.js
@@ -11,6 +11,23 @@ describe('Multi-file store utils', () => {
});
});
+ describe('setPageTitleForFile', () => {
+ it('sets the document page title for the file passed', () => {
+ const f = {
+ path: 'README.md',
+ };
+
+ const state = {
+ currentBranchId: 'master',
+ currentProjectId: 'test/test',
+ };
+
+ utils.setPageTitleForFile(state, f);
+
+ expect(document.title).toBe('README.md · master · test/test · GitLab');
+ });
+ });
+
describe('findIndexOfFile', () => {
let localState;
@@ -597,4 +614,17 @@ describe('Multi-file store utils', () => {
});
});
});
+
+ describe('addFinalNewlineIfNeeded', () => {
+ it('adds a newline if it doesnt already exist', () => {
+ [
+ { input: 'some text', output: 'some text\n' },
+ { input: 'some text\n', output: 'some text\n' },
+ { input: 'some text\n\n', output: 'some text\n\n' },
+ { input: 'some\n text', output: 'some\n text\n' },
+ ].forEach(({ input, output }) => {
+ expect(utils.addFinalNewlineIfNeeded(input)).toEqual(output);
+ });
+ });
+ });
});
diff --git a/spec/javascripts/issue_show/helpers.js b/spec/javascripts/issue_show/helpers.js
index 5d2ced98ae4..951acfd4e10 100644
--- a/spec/javascripts/issue_show/helpers.js
+++ b/spec/javascripts/issue_show/helpers.js
@@ -1,10 +1 @@
-// eslint-disable-next-line import/prefer-default-export
-export const keyboardDownEvent = (code, metaKey = false, ctrlKey = false) => {
- const e = new CustomEvent('keydown');
-
- e.keyCode = code;
- e.metaKey = metaKey;
- e.ctrlKey = ctrlKey;
-
- return e;
-};
+export * from '../../frontend/issue_show/helpers.js';
diff --git a/spec/javascripts/lib/utils/tick_formats_spec.js b/spec/javascripts/lib/utils/tick_formats_spec.js
deleted file mode 100644
index 283989b4fc8..00000000000
--- a/spec/javascripts/lib/utils/tick_formats_spec.js
+++ /dev/null
@@ -1,40 +0,0 @@
-import { dateTickFormat, initDateFormats } from '~/lib/utils/tick_formats';
-
-import { setLanguage } from '../../helpers/locale_helper';
-
-describe('tick formats', () => {
- describe('dateTickFormat', () => {
- beforeAll(() => {
- setLanguage('de');
- initDateFormats();
- });
-
- afterAll(() => {
- setLanguage(null);
- });
-
- it('returns year for first of January', () => {
- const tick = dateTickFormat(new Date('2001-01-01'));
-
- expect(tick).toBe('2001');
- });
-
- it('returns month for first of February', () => {
- const tick = dateTickFormat(new Date('2001-02-01'));
-
- expect(tick).toBe('Februar');
- });
-
- it('returns day and month for second of February', () => {
- const tick = dateTickFormat(new Date('2001-02-02'));
-
- expect(tick).toBe('2. Feb.');
- });
-
- it('ignores time', () => {
- const tick = dateTickFormat(new Date('2001-02-02 12:34:56'));
-
- expect(tick).toBe('2. Feb.');
- });
- });
-});
diff --git a/spec/javascripts/merge_request_spec.js b/spec/javascripts/merge_request_spec.js
index 72d6e832aca..54071ccc5c2 100644
--- a/spec/javascripts/merge_request_spec.js
+++ b/spec/javascripts/merge_request_spec.js
@@ -1,5 +1,3 @@
-/* eslint-disable no-return-assign */
-
import $ from 'jquery';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
@@ -22,7 +20,8 @@ describe('MergeRequest', function() {
.onPatch(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`)
.reply(200, {});
- return (this.merge = new MergeRequest());
+ this.merge = new MergeRequest();
+ return this.merge;
});
afterEach(() => {
@@ -34,10 +33,30 @@ describe('MergeRequest', function() {
const changeEvent = document.createEvent('HTMLEvents');
changeEvent.initEvent('change', true, true);
$('input[type=checkbox]')
+ .first()
+ .attr('checked', true)[0]
+ .dispatchEvent(changeEvent);
+ setTimeout(() => {
+ expect($('.js-task-list-field').val()).toBe(
+ '- [x] Task List Item\n- [ ] \n- [ ] Task List Item 2\n',
+ );
+ done();
+ });
+ });
+
+ it('ensure that task with only spaces does not get checked incorrectly', done => {
+ // fixed in 'deckar01-task_list', '2.2.1' gem
+ spyOn($, 'ajax').and.stub();
+ const changeEvent = document.createEvent('HTMLEvents');
+ changeEvent.initEvent('change', true, true);
+ $('input[type=checkbox]')
+ .last()
.attr('checked', true)[0]
.dispatchEvent(changeEvent);
setTimeout(() => {
- expect($('.js-task-list-field').val()).toBe('- [x] Task List Item');
+ expect($('.js-task-list-field').val()).toBe(
+ '- [ ] Task List Item\n- [ ] \n- [x] Task List Item 2\n',
+ );
done();
});
});
@@ -59,7 +78,7 @@ describe('MergeRequest', function() {
`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`,
{
merge_request: {
- description: '- [ ] Task List Item',
+ description: '- [ ] Task List Item\n- [ ] \n- [ ] Task List Item 2\n',
lock_version: 0,
update_task: { line_number: lineNumber, line_source: lineSource, index, checked },
},
@@ -70,7 +89,8 @@ describe('MergeRequest', function() {
});
});
- it('shows an error notification when tasklist update failed', done => {
+ // eslint-disable-next-line jasmine/no-disabled-tests
+ xit('shows an error notification when tasklist update failed', done => {
mock
.onPatch(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`)
.reply(409, {});
diff --git a/spec/javascripts/merge_request_tabs_spec.js b/spec/javascripts/merge_request_tabs_spec.js
index b424cbc866d..73b1ea4d36f 100644
--- a/spec/javascripts/merge_request_tabs_spec.js
+++ b/spec/javascripts/merge_request_tabs_spec.js
@@ -1,4 +1,3 @@
-/* eslint-disable no-var */
import $ from 'jquery';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
@@ -11,9 +10,9 @@ import initMrPage from './helpers/init_vue_mr_page_helper';
describe('MergeRequestTabs', function() {
let mrPageMock;
- var stubLocation = {};
- var setLocation = function(stubs) {
- var defaults = {
+ const stubLocation = {};
+ const setLocation = function(stubs) {
+ const defaults = {
pathname: '',
search: '',
hash: '',
@@ -44,9 +43,9 @@ describe('MergeRequestTabs', function() {
});
describe('opensInNewTab', function() {
- var tabUrl;
- var windowTarget = '_blank';
+ const windowTarget = '_blank';
let clickTabParams;
+ let tabUrl;
beforeEach(function() {
loadFixtures('merge_requests/merge_request_with_task_list.html');
@@ -193,11 +192,10 @@ describe('MergeRequestTabs', function() {
});
it('replaces the current history state', function() {
- var newState;
setLocation({
pathname: '/foo/bar/merge_requests/1',
});
- newState = this.subject('commits');
+ const newState = this.subject('commits');
expect(this.spies.history).toHaveBeenCalledWith(
{
diff --git a/spec/javascripts/monitoring/charts/heatmap_spec.js b/spec/javascripts/monitoring/charts/heatmap_spec.js
new file mode 100644
index 00000000000..9a98fc6fb05
--- /dev/null
+++ b/spec/javascripts/monitoring/charts/heatmap_spec.js
@@ -0,0 +1,69 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlHeatmap } from '@gitlab/ui/dist/charts';
+import Heatmap from '~/monitoring/components/charts/heatmap.vue';
+import { graphDataPrometheusQueryRangeMultiTrack } from '../mock_data';
+
+describe('Heatmap component', () => {
+ let heatmapChart;
+ let store;
+
+ beforeEach(() => {
+ heatmapChart = shallowMount(Heatmap, {
+ propsData: {
+ graphData: graphDataPrometheusQueryRangeMultiTrack,
+ containerWidth: 100,
+ },
+ store,
+ });
+ });
+
+ afterEach(() => {
+ heatmapChart.destroy();
+ });
+
+ describe('wrapped components', () => {
+ describe('GitLab UI heatmap chart', () => {
+ let glHeatmapChart;
+
+ beforeEach(() => {
+ glHeatmapChart = heatmapChart.find(GlHeatmap);
+ });
+
+ it('is a Vue instance', () => {
+ expect(glHeatmapChart.isVueInstance()).toBe(true);
+ });
+
+ it('should display a label on the x axis', () => {
+ expect(heatmapChart.vm.xAxisName).toBe(graphDataPrometheusQueryRangeMultiTrack.x_label);
+ });
+
+ it('should display a label on the y axis', () => {
+ expect(heatmapChart.vm.yAxisName).toBe(graphDataPrometheusQueryRangeMultiTrack.y_label);
+ });
+
+ // According to the echarts docs https://echarts.apache.org/en/option.html#series-heatmap.data
+ // each row of the heatmap chart is represented by an array inside another parent array
+ // e.g. [[0, 0, 10]], the format represents the column, the row and finally the value
+ // corresponding to the cell
+
+ it('should return chartData with a length of x by y, with a length of 3 per array', () => {
+ const row = heatmapChart.vm.chartData[0];
+
+ expect(row.length).toBe(3);
+ expect(heatmapChart.vm.chartData.length).toBe(30);
+ });
+
+ it('returns a series of labels for the x axis', () => {
+ const { xAxisLabels } = heatmapChart.vm;
+
+ expect(xAxisLabels.length).toBe(5);
+ });
+
+ it('returns a series of labels for the y axis', () => {
+ const { yAxisLabels } = heatmapChart.vm;
+
+ expect(yAxisLabels.length).toBe(6);
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/components/dashboard_spec.js b/spec/javascripts/monitoring/components/dashboard_spec.js
index 75df2ce3103..0f20171726c 100644
--- a/spec/javascripts/monitoring/components/dashboard_spec.js
+++ b/spec/javascripts/monitoring/components/dashboard_spec.js
@@ -7,11 +7,12 @@ import Dashboard from '~/monitoring/components/dashboard.vue';
import * as types from '~/monitoring/stores/mutation_types';
import { createStore } from '~/monitoring/stores';
import axios from '~/lib/utils/axios_utils';
-import MonitoringMock, {
+import {
metricsGroupsAPIResponse,
+ mockedQueryResultPayload,
+ mockedQueryResultPayloadCoresTotal,
mockApiEndpoint,
environmentData,
- singleGroupResponse,
dashboardGitResponse,
} from '../mock_data';
@@ -44,12 +45,33 @@ const resetSpy = spy => {
export default propsData;
+function setupComponentStore(component) {
+ component.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
+ metricsGroupsAPIResponse,
+ );
+
+ // Load 2 panels to the dashboard
+ component.$store.commit(
+ `monitoringDashboard/${types.SET_QUERY_RESULT}`,
+ mockedQueryResultPayload,
+ );
+ component.$store.commit(
+ `monitoringDashboard/${types.SET_QUERY_RESULT}`,
+ mockedQueryResultPayloadCoresTotal,
+ );
+
+ component.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
+ environmentData,
+ );
+}
+
describe('Dashboard', () => {
let DashboardComponent;
let mock;
let store;
let component;
- let mockGraphData;
beforeEach(() => {
setFixtures(`
@@ -100,6 +122,32 @@ describe('Dashboard', () => {
});
});
+ describe('cluster health', () => {
+ let wrapper;
+
+ beforeEach(done => {
+ wrapper = shallowMount(DashboardComponent, {
+ localVue,
+ sync: false,
+ propsData: { ...propsData, hasMetrics: true },
+ store,
+ });
+
+ // all_dashboards is not defined in health dashboards
+ wrapper.vm.$store.commit(`monitoringDashboard/${types.SET_ALL_DASHBOARDS}`, undefined);
+ wrapper.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders correctly', () => {
+ expect(wrapper.isVueInstance()).toBe(true);
+ expect(wrapper.exists()).toBe(true);
+ });
+ });
+
describe('requests information to the server', () => {
let spy;
beforeEach(() => {
@@ -123,25 +171,6 @@ describe('Dashboard', () => {
});
});
- it('hides the legend when showLegend is false', done => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- showLegend: false,
- },
- store,
- });
-
- setTimeout(() => {
- expect(component.showEmptyState).toEqual(false);
- expect(component.$el.querySelector('.legend-group')).toEqual(null);
- expect(component.$el.querySelector('.prometheus-graph-group')).toBeTruthy();
- done();
- });
- });
-
it('hides the group panels when showPanels is false', done => {
component = new DashboardComponent({
el: document.querySelector('.prometheus-graphs'),
@@ -153,52 +182,66 @@ describe('Dashboard', () => {
store,
});
- setTimeout(() => {
- expect(component.showEmptyState).toEqual(false);
- expect(component.$el.querySelector('.prometheus-panel')).toEqual(null);
- expect(component.$el.querySelector('.prometheus-graph-group')).toBeTruthy();
- done();
- });
+ setupComponentStore(component);
+
+ Vue.nextTick()
+ .then(() => {
+ expect(component.showEmptyState).toEqual(false);
+ expect(component.$el.querySelector('.prometheus-panel')).toEqual(null);
+ expect(component.$el.querySelector('.prometheus-graph-group')).toBeTruthy();
+
+ done();
+ })
+ .catch(done.fail);
});
- it('renders the environments dropdown with a number of environments', done => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- showPanels: false,
- },
- store,
+ describe('when all the requests have been commited by the store', () => {
+ beforeEach(() => {
+ component = new DashboardComponent({
+ el: document.querySelector('.prometheus-graphs'),
+ propsData: {
+ ...propsData,
+ hasMetrics: true,
+ },
+ store,
+ });
+
+ setupComponentStore(component);
});
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- singleGroupResponse,
- );
+ it('renders the environments dropdown with a number of environments', done => {
+ Vue.nextTick()
+ .then(() => {
+ const dropdownMenuEnvironments = component.$el.querySelectorAll(
+ '.js-environments-dropdown .dropdown-item',
+ );
- Vue.nextTick()
- .then(() => {
- const dropdownMenuEnvironments = component.$el.querySelectorAll(
- '.js-environments-dropdown .dropdown-item',
- );
+ expect(component.environments.length).toEqual(environmentData.length);
+ expect(dropdownMenuEnvironments.length).toEqual(component.environments.length);
- expect(component.environments.length).toEqual(environmentData.length);
- expect(dropdownMenuEnvironments.length).toEqual(component.environments.length);
+ Array.from(dropdownMenuEnvironments).forEach((value, index) => {
+ if (environmentData[index].metrics_path) {
+ expect(value).toHaveAttr('href', environmentData[index].metrics_path);
+ }
+ });
- Array.from(dropdownMenuEnvironments).forEach((value, index) => {
- if (environmentData[index].metrics_path) {
- expect(value).toHaveAttr('href', environmentData[index].metrics_path);
- }
- });
+ done();
+ })
+ .catch(done.fail);
+ });
- done();
- })
- .catch(done.fail);
+ it('renders the environments dropdown with a single active element', done => {
+ Vue.nextTick()
+ .then(() => {
+ const dropdownItems = component.$el.querySelectorAll(
+ '.js-environments-dropdown .dropdown-item.active',
+ );
+
+ expect(dropdownItems.length).toEqual(1);
+ done();
+ })
+ .catch(done.fail);
+ });
});
it('hides the environments dropdown list when there is no environments', done => {
@@ -207,15 +250,17 @@ describe('Dashboard', () => {
propsData: {
...propsData,
hasMetrics: true,
- showPanels: false,
},
store,
});
- component.$store.commit(`monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`, []);
component.$store.commit(
`monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- singleGroupResponse,
+ metricsGroupsAPIResponse,
+ );
+ component.$store.commit(
+ `monitoringDashboard/${types.SET_QUERY_RESULT}`,
+ mockedQueryResultPayload,
);
Vue.nextTick()
@@ -230,7 +275,7 @@ describe('Dashboard', () => {
.catch(done.fail);
});
- it('renders the environments dropdown with a single active element', done => {
+ it('renders the datetimepicker dropdown', done => {
component = new DashboardComponent({
el: document.querySelector('.prometheus-graphs'),
propsData: {
@@ -241,64 +286,16 @@ describe('Dashboard', () => {
store,
});
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- singleGroupResponse,
- );
+ setupComponentStore(component);
Vue.nextTick()
.then(() => {
- const dropdownItems = component.$el.querySelectorAll(
- '.js-environments-dropdown .dropdown-item.active',
- );
-
- expect(dropdownItems.length).toEqual(1);
+ expect(component.$el.querySelector('.js-time-window-dropdown')).not.toBeNull();
done();
})
.catch(done.fail);
});
- it('hides the dropdown', done => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- showPanels: false,
- environmentsEndpoint: '',
- },
- store,
- });
-
- Vue.nextTick(() => {
- const dropdownIsActiveElement = component.$el.querySelectorAll('.environments');
-
- expect(dropdownIsActiveElement.length).toEqual(0);
- done();
- });
- });
-
- it('renders the datetimepicker dropdown', done => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- showPanels: false,
- },
- store,
- });
-
- setTimeout(() => {
- expect(component.$el.querySelector('.js-time-window-dropdown')).not.toBeNull();
- done();
- });
- });
-
it('fetches the metrics data with proper time window', done => {
component = new DashboardComponent({
el: document.querySelector('.prometheus-graphs'),
@@ -347,14 +344,21 @@ describe('Dashboard', () => {
el: document.querySelector('.prometheus-graphs'),
propsData: { ...propsData, hasMetrics: true },
store,
+ sync: false,
});
- setTimeout(() => {
- const selectedTimeWindow = component.$el.querySelector('.js-time-window-dropdown .active');
+ setupComponentStore(component);
- expect(selectedTimeWindow.textContent.trim()).toEqual('30 minutes');
- done();
- });
+ Vue.nextTick()
+ .then(() => {
+ const selectedTimeWindow = component.$el.querySelector(
+ '.js-time-window-dropdown .active',
+ );
+
+ expect(selectedTimeWindow.textContent.trim()).toEqual('30 minutes');
+ done();
+ })
+ .catch(done.fail);
});
it('shows an error message if invalid url parameters are passed', done => {
@@ -381,29 +385,36 @@ describe('Dashboard', () => {
describe('drag and drop function', () => {
let wrapper;
let expectedPanelCount; // also called metrics, naming to be improved: https://gitlab.com/gitlab-org/gitlab/issues/31565
+
const findDraggables = () => wrapper.findAll(VueDraggable);
const findEnabledDraggables = () => findDraggables().filter(f => !f.attributes('disabled'));
const findDraggablePanels = () => wrapper.findAll('.js-draggable-panel');
const findRearrangeButton = () => wrapper.find('.js-rearrange-button');
- beforeEach(done => {
+ beforeEach(() => {
mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
- expectedPanelCount = metricsGroupsAPIResponse.data.reduce(
- (acc, d) => d.metrics.length + acc,
+ expectedPanelCount = metricsGroupsAPIResponse.reduce(
+ (acc, group) => group.panels.length + acc,
0,
);
- store.dispatch('monitoringDashboard/setFeatureFlags', { additionalPanelTypesEnabled: true });
+ });
+ beforeEach(done => {
wrapper = shallowMount(DashboardComponent, {
localVue,
sync: false,
propsData: { ...propsData, hasMetrics: true },
store,
+ attachToDocument: true,
});
- // not using $nextTicket becuase we must wait for the dashboard
- // to be populated with the mock data results.
- setTimeout(done);
+ setupComponentStore(wrapper.vm);
+
+ wrapper.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
});
it('wraps vuedraggable', () => {
@@ -442,6 +453,28 @@ describe('Dashboard', () => {
expect(findEnabledDraggables()).toEqual(findDraggables());
});
+ it('metrics can be swapped', done => {
+ const firstDraggable = findDraggables().at(0);
+ const mockMetrics = [...metricsGroupsAPIResponse[0].panels];
+ const value = () => firstDraggable.props('value');
+
+ expect(value().length).toBe(mockMetrics.length);
+ value().forEach((metric, i) => {
+ expect(metric.title).toBe(mockMetrics[i].title);
+ });
+
+ // swap two elements and `input` them
+ [mockMetrics[0], mockMetrics[1]] = [mockMetrics[1], mockMetrics[0]];
+ firstDraggable.vm.$emit('input', mockMetrics);
+
+ firstDraggable.vm.$nextTick(() => {
+ value().forEach((metric, i) => {
+ expect(metric.title).toBe(mockMetrics[i].title);
+ });
+ done();
+ });
+ });
+
it('shows a remove button, which removes a panel', done => {
expect(findFirstDraggableRemoveButton().isEmpty()).toBe(false);
@@ -449,8 +482,6 @@ describe('Dashboard', () => {
findFirstDraggableRemoveButton().trigger('click');
wrapper.vm.$nextTick(() => {
- // At present graphs will not be removed in backend
- // See https://gitlab.com/gitlab-org/gitlab/issues/27835
expect(findDraggablePanels().length).toEqual(expectedPanelCount - 1);
done();
});
@@ -466,10 +497,6 @@ describe('Dashboard', () => {
});
});
});
-
- afterEach(() => {
- wrapper.destroy();
- });
});
// https://gitlab.com/gitlab-org/gitlab-ce/issues/66922
@@ -539,42 +566,93 @@ describe('Dashboard', () => {
});
});
- describe('when the window resizes', () => {
+ describe('responds to window resizes', () => {
+ let promPanel;
+ let promGroup;
+ let panelToggle;
+ let chart;
beforeEach(() => {
mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
- jasmine.clock().install();
- });
- afterEach(() => {
- jasmine.clock().uninstall();
- });
-
- it('sets elWidth to page width when the sidebar is resized', done => {
component = new DashboardComponent({
el: document.querySelector('.prometheus-graphs'),
propsData: {
...propsData,
hasMetrics: true,
- showPanels: false,
+ showPanels: true,
},
store,
});
- expect(component.elWidth).toEqual(0);
+ setupComponentStore(component);
- const pageLayoutEl = document.querySelector('.layout-page');
- pageLayoutEl.classList.add('page-with-icon-sidebar');
+ return Vue.nextTick().then(() => {
+ promPanel = component.$el.querySelector('.prometheus-panel');
+ promGroup = promPanel.querySelector('.prometheus-graph-group');
+ panelToggle = promPanel.querySelector('.js-graph-group-toggle');
+ chart = promGroup.querySelector('.position-relative svg');
+ });
+ });
- Vue.nextTick()
- .then(() => {
- jasmine.clock().tick(1000);
- return Vue.nextTick();
- })
- .then(() => {
- expect(component.elWidth).toEqual(pageLayoutEl.clientWidth);
- done();
- })
- .catch(done.fail);
+ it('setting chart size to zero when panel group is hidden', () => {
+ expect(promGroup.style.display).toBe('');
+ expect(chart.clientWidth).toBeGreaterThan(0);
+
+ panelToggle.click();
+ return Vue.nextTick().then(() => {
+ expect(promGroup.style.display).toBe('none');
+ expect(chart.clientWidth).toBe(0);
+ promPanel.style.width = '500px';
+ });
+ });
+
+ it('expanding chart panel group after resize displays chart', () => {
+ panelToggle.click();
+
+ expect(chart.clientWidth).toBeGreaterThan(0);
+ });
+ });
+
+ describe('dashboard edit link', () => {
+ let wrapper;
+ const findEditLink = () => wrapper.find('.js-edit-link');
+
+ beforeEach(done => {
+ mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
+
+ wrapper = shallowMount(DashboardComponent, {
+ localVue,
+ sync: false,
+ attachToDocument: true,
+ propsData: { ...propsData, hasMetrics: true },
+ store,
+ });
+
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.SET_ALL_DASHBOARDS}`,
+ dashboardGitResponse,
+ );
+ wrapper.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('is not present for the default dashboard', () => {
+ expect(findEditLink().exists()).toBe(false);
+ });
+
+ it('is present for a custom dashboard, and links to its edit_path', done => {
+ const dashboard = dashboardGitResponse[1]; // non-default dashboard
+ const currentDashboard = dashboard.path;
+
+ wrapper.setProps({ currentDashboard });
+ wrapper.vm.$nextTick(() => {
+ expect(findEditLink().exists()).toBe(true);
+ expect(findEditLink().attributes('href')).toBe(dashboard.project_blob_path);
+ done();
+ });
});
});
@@ -619,20 +697,6 @@ describe('Dashboard', () => {
store,
});
- component.$store.dispatch('monitoringDashboard/setFeatureFlags', {
- prometheusEndpoint: false,
- });
-
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
-
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- singleGroupResponse,
- );
-
component.$store.commit(
`monitoringDashboard/${types.SET_ALL_DASHBOARDS}`,
dashboardGitResponse,
@@ -648,36 +712,4 @@ describe('Dashboard', () => {
});
});
});
-
- describe('when downloading metrics data as CSV', () => {
- beforeEach(() => {
- component = new DashboardComponent({
- propsData: {
- ...propsData,
- },
- store,
- });
- store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- MonitoringMock.data,
- );
- [mockGraphData] = component.$store.state.monitoringDashboard.groups[0].metrics;
- });
-
- describe('csvText', () => {
- it('converts metrics data from json to csv', () => {
- const header = `timestamp,${mockGraphData.y_label}`;
- const data = mockGraphData.queries[0].result[0].values;
- const firstRow = `${data[0][0]},${data[0][1]}`;
-
- expect(component.csvText(mockGraphData)).toMatch(`^${header}\r\n${firstRow}`);
- });
- });
-
- describe('downloadCsv', () => {
- it('produces a link with a Blob', () => {
- expect(component.downloadCsv(mockGraphData)).toContain(`blob:`);
- });
- });
- });
});
diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js
index 17e7314e214..f9cc839bde6 100644
--- a/spec/javascripts/monitoring/mock_data.js
+++ b/spec/javascripts/monitoring/mock_data.js
@@ -1,943 +1,103 @@
-export const mockApiEndpoint = `${gl.TEST_HOST}/monitoring/mock`;
+import {
+ anomalyMockGraphData as importedAnomalyMockGraphData,
+ metricsGroupsAPIResponse as importedMetricsGroupsAPIResponse,
+ environmentData as importedEnvironmentData,
+ dashboardGitResponse as importedDashboardGitResponse,
+} from '../../frontend/monitoring/mock_data';
-export const mockProjectPath = '/frontend-fixtures/environments-project';
+export const anomalyMockGraphData = importedAnomalyMockGraphData;
+export const metricsGroupsAPIResponse = importedMetricsGroupsAPIResponse;
+export const environmentData = importedEnvironmentData;
+export const dashboardGitResponse = importedDashboardGitResponse;
-export const metricsGroupsAPIResponse = {
- success: true,
- data: [
- {
- group: 'Kubernetes',
- priority: 1,
- metrics: [
- {
- id: 5,
- title: 'Memory usage',
- weight: 1,
- queries: [
- {
- query_range: 'avg(container_memory_usage_bytes{%{environment_filter}}) / 2^20',
- label: 'Memory',
- unit: 'MiB',
- result: [
- {
- metric: {},
- values: [
- [1495700554.925, '8.0390625'],
- [1495700614.925, '8.0390625'],
- [1495700674.925, '8.0390625'],
- [1495700734.925, '8.0390625'],
- [1495700794.925, '8.0390625'],
- [1495700854.925, '8.0390625'],
- [1495700914.925, '8.0390625'],
- [1495700974.925, '8.0390625'],
- [1495701034.925, '8.0390625'],
- [1495701094.925, '8.0390625'],
- [1495701154.925, '8.0390625'],
- [1495701214.925, '8.0390625'],
- [1495701274.925, '8.0390625'],
- [1495701334.925, '8.0390625'],
- [1495701394.925, '8.0390625'],
- [1495701454.925, '8.0390625'],
- [1495701514.925, '8.0390625'],
- [1495701574.925, '8.0390625'],
- [1495701634.925, '8.0390625'],
- [1495701694.925, '8.0390625'],
- [1495701754.925, '8.0390625'],
- [1495701814.925, '8.0390625'],
- [1495701874.925, '8.0390625'],
- [1495701934.925, '8.0390625'],
- [1495701994.925, '8.0390625'],
- [1495702054.925, '8.0390625'],
- [1495702114.925, '8.0390625'],
- [1495702174.925, '8.0390625'],
- [1495702234.925, '8.0390625'],
- [1495702294.925, '8.0390625'],
- [1495702354.925, '8.0390625'],
- [1495702414.925, '8.0390625'],
- [1495702474.925, '8.0390625'],
- [1495702534.925, '8.0390625'],
- [1495702594.925, '8.0390625'],
- [1495702654.925, '8.0390625'],
- [1495702714.925, '8.0390625'],
- [1495702774.925, '8.0390625'],
- [1495702834.925, '8.0390625'],
- [1495702894.925, '8.0390625'],
- [1495702954.925, '8.0390625'],
- [1495703014.925, '8.0390625'],
- [1495703074.925, '8.0390625'],
- [1495703134.925, '8.0390625'],
- [1495703194.925, '8.0390625'],
- [1495703254.925, '8.03515625'],
- [1495703314.925, '8.03515625'],
- [1495703374.925, '8.03515625'],
- [1495703434.925, '8.03515625'],
- [1495703494.925, '8.03515625'],
- [1495703554.925, '8.03515625'],
- [1495703614.925, '8.03515625'],
- [1495703674.925, '8.03515625'],
- [1495703734.925, '8.03515625'],
- [1495703794.925, '8.03515625'],
- [1495703854.925, '8.03515625'],
- [1495703914.925, '8.03515625'],
- [1495703974.925, '8.03515625'],
- [1495704034.925, '8.03515625'],
- [1495704094.925, '8.03515625'],
- [1495704154.925, '8.03515625'],
- [1495704214.925, '7.9296875'],
- [1495704274.925, '7.9296875'],
- [1495704334.925, '7.9296875'],
- [1495704394.925, '7.9296875'],
- [1495704454.925, '7.9296875'],
- [1495704514.925, '7.9296875'],
- [1495704574.925, '7.9296875'],
- [1495704634.925, '7.9296875'],
- [1495704694.925, '7.9296875'],
- [1495704754.925, '7.9296875'],
- [1495704814.925, '7.9296875'],
- [1495704874.925, '7.9296875'],
- [1495704934.925, '7.9296875'],
- [1495704994.925, '7.9296875'],
- [1495705054.925, '7.9296875'],
- [1495705114.925, '7.9296875'],
- [1495705174.925, '7.9296875'],
- [1495705234.925, '7.9296875'],
- [1495705294.925, '7.9296875'],
- [1495705354.925, '7.9296875'],
- [1495705414.925, '7.9296875'],
- [1495705474.925, '7.9296875'],
- [1495705534.925, '7.9296875'],
- [1495705594.925, '7.9296875'],
- [1495705654.925, '7.9296875'],
- [1495705714.925, '7.9296875'],
- [1495705774.925, '7.9296875'],
- [1495705834.925, '7.9296875'],
- [1495705894.925, '7.9296875'],
- [1495705954.925, '7.9296875'],
- [1495706014.925, '7.9296875'],
- [1495706074.925, '7.9296875'],
- [1495706134.925, '7.9296875'],
- [1495706194.925, '7.9296875'],
- [1495706254.925, '7.9296875'],
- [1495706314.925, '7.9296875'],
- [1495706374.925, '7.9296875'],
- [1495706434.925, '7.9296875'],
- [1495706494.925, '7.9296875'],
- [1495706554.925, '7.9296875'],
- [1495706614.925, '7.9296875'],
- [1495706674.925, '7.9296875'],
- [1495706734.925, '7.9296875'],
- [1495706794.925, '7.9296875'],
- [1495706854.925, '7.9296875'],
- [1495706914.925, '7.9296875'],
- [1495706974.925, '7.9296875'],
- [1495707034.925, '7.9296875'],
- [1495707094.925, '7.9296875'],
- [1495707154.925, '7.9296875'],
- [1495707214.925, '7.9296875'],
- [1495707274.925, '7.9296875'],
- [1495707334.925, '7.9296875'],
- [1495707394.925, '7.9296875'],
- [1495707454.925, '7.9296875'],
- [1495707514.925, '7.9296875'],
- [1495707574.925, '7.9296875'],
- [1495707634.925, '7.9296875'],
- [1495707694.925, '7.9296875'],
- [1495707754.925, '7.9296875'],
- [1495707814.925, '7.9296875'],
- [1495707874.925, '7.9296875'],
- [1495707934.925, '7.9296875'],
- [1495707994.925, '7.9296875'],
- [1495708054.925, '7.9296875'],
- [1495708114.925, '7.9296875'],
- [1495708174.925, '7.9296875'],
- [1495708234.925, '7.9296875'],
- [1495708294.925, '7.9296875'],
- [1495708354.925, '7.9296875'],
- [1495708414.925, '7.9296875'],
- [1495708474.925, '7.9296875'],
- [1495708534.925, '7.9296875'],
- [1495708594.925, '7.9296875'],
- [1495708654.925, '7.9296875'],
- [1495708714.925, '7.9296875'],
- [1495708774.925, '7.9296875'],
- [1495708834.925, '7.9296875'],
- [1495708894.925, '7.9296875'],
- [1495708954.925, '7.8984375'],
- [1495709014.925, '7.8984375'],
- [1495709074.925, '7.8984375'],
- [1495709134.925, '7.8984375'],
- [1495709194.925, '7.8984375'],
- [1495709254.925, '7.89453125'],
- [1495709314.925, '7.89453125'],
- [1495709374.925, '7.89453125'],
- [1495709434.925, '7.89453125'],
- [1495709494.925, '7.89453125'],
- [1495709554.925, '7.89453125'],
- [1495709614.925, '7.89453125'],
- [1495709674.925, '7.89453125'],
- [1495709734.925, '7.89453125'],
- [1495709794.925, '7.89453125'],
- [1495709854.925, '7.89453125'],
- [1495709914.925, '7.89453125'],
- [1495709974.925, '7.89453125'],
- [1495710034.925, '7.89453125'],
- [1495710094.925, '7.89453125'],
- [1495710154.925, '7.89453125'],
- [1495710214.925, '7.89453125'],
- [1495710274.925, '7.89453125'],
- [1495710334.925, '7.89453125'],
- [1495710394.925, '7.89453125'],
- [1495710454.925, '7.89453125'],
- [1495710514.925, '7.89453125'],
- [1495710574.925, '7.89453125'],
- [1495710634.925, '7.89453125'],
- [1495710694.925, '7.89453125'],
- [1495710754.925, '7.89453125'],
- [1495710814.925, '7.89453125'],
- [1495710874.925, '7.89453125'],
- [1495710934.925, '7.89453125'],
- [1495710994.925, '7.89453125'],
- [1495711054.925, '7.89453125'],
- [1495711114.925, '7.89453125'],
- [1495711174.925, '7.8515625'],
- [1495711234.925, '7.8515625'],
- [1495711294.925, '7.8515625'],
- [1495711354.925, '7.8515625'],
- [1495711414.925, '7.8515625'],
- [1495711474.925, '7.8515625'],
- [1495711534.925, '7.8515625'],
- [1495711594.925, '7.8515625'],
- [1495711654.925, '7.8515625'],
- [1495711714.925, '7.8515625'],
- [1495711774.925, '7.8515625'],
- [1495711834.925, '7.8515625'],
- [1495711894.925, '7.8515625'],
- [1495711954.925, '7.8515625'],
- [1495712014.925, '7.8515625'],
- [1495712074.925, '7.8515625'],
- [1495712134.925, '7.8515625'],
- [1495712194.925, '7.8515625'],
- [1495712254.925, '7.8515625'],
- [1495712314.925, '7.8515625'],
- [1495712374.925, '7.8515625'],
- [1495712434.925, '7.83203125'],
- [1495712494.925, '7.83203125'],
- [1495712554.925, '7.83203125'],
- [1495712614.925, '7.83203125'],
- [1495712674.925, '7.83203125'],
- [1495712734.925, '7.83203125'],
- [1495712794.925, '7.83203125'],
- [1495712854.925, '7.83203125'],
- [1495712914.925, '7.83203125'],
- [1495712974.925, '7.83203125'],
- [1495713034.925, '7.83203125'],
- [1495713094.925, '7.83203125'],
- [1495713154.925, '7.83203125'],
- [1495713214.925, '7.83203125'],
- [1495713274.925, '7.83203125'],
- [1495713334.925, '7.83203125'],
- [1495713394.925, '7.8125'],
- [1495713454.925, '7.8125'],
- [1495713514.925, '7.8125'],
- [1495713574.925, '7.8125'],
- [1495713634.925, '7.8125'],
- [1495713694.925, '7.8125'],
- [1495713754.925, '7.8125'],
- [1495713814.925, '7.8125'],
- [1495713874.925, '7.8125'],
- [1495713934.925, '7.8125'],
- [1495713994.925, '7.8125'],
- [1495714054.925, '7.8125'],
- [1495714114.925, '7.8125'],
- [1495714174.925, '7.8125'],
- [1495714234.925, '7.8125'],
- [1495714294.925, '7.8125'],
- [1495714354.925, '7.80859375'],
- [1495714414.925, '7.80859375'],
- [1495714474.925, '7.80859375'],
- [1495714534.925, '7.80859375'],
- [1495714594.925, '7.80859375'],
- [1495714654.925, '7.80859375'],
- [1495714714.925, '7.80859375'],
- [1495714774.925, '7.80859375'],
- [1495714834.925, '7.80859375'],
- [1495714894.925, '7.80859375'],
- [1495714954.925, '7.80859375'],
- [1495715014.925, '7.80859375'],
- [1495715074.925, '7.80859375'],
- [1495715134.925, '7.80859375'],
- [1495715194.925, '7.80859375'],
- [1495715254.925, '7.80859375'],
- [1495715314.925, '7.80859375'],
- [1495715374.925, '7.80859375'],
- [1495715434.925, '7.80859375'],
- [1495715494.925, '7.80859375'],
- [1495715554.925, '7.80859375'],
- [1495715614.925, '7.80859375'],
- [1495715674.925, '7.80859375'],
- [1495715734.925, '7.80859375'],
- [1495715794.925, '7.80859375'],
- [1495715854.925, '7.80859375'],
- [1495715914.925, '7.80078125'],
- [1495715974.925, '7.80078125'],
- [1495716034.925, '7.80078125'],
- [1495716094.925, '7.80078125'],
- [1495716154.925, '7.80078125'],
- [1495716214.925, '7.796875'],
- [1495716274.925, '7.796875'],
- [1495716334.925, '7.796875'],
- [1495716394.925, '7.796875'],
- [1495716454.925, '7.796875'],
- [1495716514.925, '7.796875'],
- [1495716574.925, '7.796875'],
- [1495716634.925, '7.796875'],
- [1495716694.925, '7.796875'],
- [1495716754.925, '7.796875'],
- [1495716814.925, '7.796875'],
- [1495716874.925, '7.79296875'],
- [1495716934.925, '7.79296875'],
- [1495716994.925, '7.79296875'],
- [1495717054.925, '7.79296875'],
- [1495717114.925, '7.79296875'],
- [1495717174.925, '7.7890625'],
- [1495717234.925, '7.7890625'],
- [1495717294.925, '7.7890625'],
- [1495717354.925, '7.7890625'],
- [1495717414.925, '7.7890625'],
- [1495717474.925, '7.7890625'],
- [1495717534.925, '7.7890625'],
- [1495717594.925, '7.7890625'],
- [1495717654.925, '7.7890625'],
- [1495717714.925, '7.7890625'],
- [1495717774.925, '7.7890625'],
- [1495717834.925, '7.77734375'],
- [1495717894.925, '7.77734375'],
- [1495717954.925, '7.77734375'],
- [1495718014.925, '7.77734375'],
- [1495718074.925, '7.77734375'],
- [1495718134.925, '7.7421875'],
- [1495718194.925, '7.7421875'],
- [1495718254.925, '7.7421875'],
- [1495718314.925, '7.7421875'],
- ],
- },
- ],
- },
- ],
- },
- {
- id: 6,
- title: 'CPU usage',
- y_label: 'CPU',
- weight: 1,
- queries: [
- {
- appearance: {
- line: {
- width: 2,
- },
- },
- query_range:
- 'avg(rate(container_cpu_usage_seconds_total{%{environment_filter}}[2m])) * 100',
- label: 'Core Usage',
- unit: 'Cores',
- result: [
- {
- metric: {},
- values: [
- [1495700554.925, '0.0010794445585559514'],
- [1495700614.925, '0.003927214935433527'],
- [1495700674.925, '0.0053045219047619975'],
- [1495700734.925, '0.0048892095238097155'],
- [1495700794.925, '0.005827140952381137'],
- [1495700854.925, '0.00569846906219937'],
- [1495700914.925, '0.004972616802849382'],
- [1495700974.925, '0.005117509523809902'],
- [1495701034.925, '0.00512389061919564'],
- [1495701094.925, '0.005199100501890691'],
- [1495701154.925, '0.005415746394885837'],
- [1495701214.925, '0.005607682788146286'],
- [1495701274.925, '0.005641300000000118'],
- [1495701334.925, '0.0071166279368766495'],
- [1495701394.925, '0.0063242138095234044'],
- [1495701454.925, '0.005793314698235304'],
- [1495701514.925, '0.00703934942237556'],
- [1495701574.925, '0.006357007076123191'],
- [1495701634.925, '0.003753167300126738'],
- [1495701694.925, '0.005018469678430698'],
- [1495701754.925, '0.0045217153371887'],
- [1495701814.925, '0.006140104285714119'],
- [1495701874.925, '0.004818684285714102'],
- [1495701934.925, '0.005079509718955242'],
- [1495701994.925, '0.005059981142498263'],
- [1495702054.925, '0.005269098389538773'],
- [1495702114.925, '0.005269954285714175'],
- [1495702174.925, '0.014199241435795856'],
- [1495702234.925, '0.01511936843111017'],
- [1495702294.925, '0.0060933692920682875'],
- [1495702354.925, '0.004945682380952493'],
- [1495702414.925, '0.005641266666666565'],
- [1495702474.925, '0.005223752857142996'],
- [1495702534.925, '0.005743098505699831'],
- [1495702594.925, '0.00538493380952391'],
- [1495702654.925, '0.005507793883751339'],
- [1495702714.925, '0.005666705714285466'],
- [1495702774.925, '0.006231530000000112'],
- [1495702834.925, '0.006570768635394899'],
- [1495702894.925, '0.005551146666666895'],
- [1495702954.925, '0.005602604737098058'],
- [1495703014.925, '0.00613993580402159'],
- [1495703074.925, '0.004770258764368832'],
- [1495703134.925, '0.005512376671364914'],
- [1495703194.925, '0.005254436666666674'],
- [1495703254.925, '0.0050109839141320505'],
- [1495703314.925, '0.0049478019256960016'],
- [1495703374.925, '0.0037666860965123463'],
- [1495703434.925, '0.004813526061656314'],
- [1495703494.925, '0.005047748095238278'],
- [1495703554.925, '0.00386494081008772'],
- [1495703614.925, '0.004304037408111405'],
- [1495703674.925, '0.004999466661587168'],
- [1495703734.925, '0.004689140476190834'],
- [1495703794.925, '0.004746126153582475'],
- [1495703854.925, '0.004482706382572302'],
- [1495703914.925, '0.004032808931864524'],
- [1495703974.925, '0.005728319047618988'],
- [1495704034.925, '0.004436139179627006'],
- [1495704094.925, '0.004553455714285617'],
- [1495704154.925, '0.003455244285714341'],
- [1495704214.925, '0.004742244761904621'],
- [1495704274.925, '0.005366978571428422'],
- [1495704334.925, '0.004257954837665058'],
- [1495704394.925, '0.005431603259831257'],
- [1495704454.925, '0.0052009214498621986'],
- [1495704514.925, '0.004317201904761618'],
- [1495704574.925, '0.004307384285714157'],
- [1495704634.925, '0.004789801146644822'],
- [1495704694.925, '0.0051429795906706485'],
- [1495704754.925, '0.005322495714285479'],
- [1495704814.925, '0.004512809333244233'],
- [1495704874.925, '0.004953843582568726'],
- [1495704934.925, '0.005812690120858119'],
- [1495704994.925, '0.004997024285714838'],
- [1495705054.925, '0.005246216154439592'],
- [1495705114.925, '0.0063494966618726795'],
- [1495705174.925, '0.005306004342898225'],
- [1495705234.925, '0.005081412857142978'],
- [1495705294.925, '0.00511409523809522'],
- [1495705354.925, '0.0047861001481192'],
- [1495705414.925, '0.005107688228042962'],
- [1495705474.925, '0.005271929582294012'],
- [1495705534.925, '0.004453254502681249'],
- [1495705594.925, '0.005799134293959226'],
- [1495705654.925, '0.005340865929502478'],
- [1495705714.925, '0.004911654761904942'],
- [1495705774.925, '0.005888234873953261'],
- [1495705834.925, '0.005565283333332954'],
- [1495705894.925, '0.005522869047618869'],
- [1495705954.925, '0.005177549737621646'],
- [1495706014.925, '0.0053145810232096465'],
- [1495706074.925, '0.004751095238095275'],
- [1495706134.925, '0.006242077142856976'],
- [1495706194.925, '0.00621034406957871'],
- [1495706254.925, '0.006887592738978596'],
- [1495706314.925, '0.006328128779726213'],
- [1495706374.925, '0.007488363809523927'],
- [1495706434.925, '0.006193758571428157'],
- [1495706494.925, '0.0068798371839706935'],
- [1495706554.925, '0.005757034340423128'],
- [1495706614.925, '0.004571388497294698'],
- [1495706674.925, '0.00620283044923395'],
- [1495706734.925, '0.005607562380952455'],
- [1495706794.925, '0.005506969933620308'],
- [1495706854.925, '0.005621118095238131'],
- [1495706914.925, '0.004876606098698849'],
- [1495706974.925, '0.0047871205988517206'],
- [1495707034.925, '0.00526405939458784'],
- [1495707094.925, '0.005716323800605852'],
- [1495707154.925, '0.005301459523809575'],
- [1495707214.925, '0.0051613042857144905'],
- [1495707274.925, '0.005384792857142714'],
- [1495707334.925, '0.005259719047619222'],
- [1495707394.925, '0.00584101142857182'],
- [1495707454.925, '0.0060066121920326326'],
- [1495707514.925, '0.006359978571428453'],
- [1495707574.925, '0.006315876322151109'],
- [1495707634.925, '0.005590012517198831'],
- [1495707694.925, '0.005517419877137072'],
- [1495707754.925, '0.006089813430348506'],
- [1495707814.925, '0.00466754476190479'],
- [1495707874.925, '0.006059954380517721'],
- [1495707934.925, '0.005085657142856972'],
- [1495707994.925, '0.005897665238095296'],
- [1495708054.925, '0.0062282023199555885'],
- [1495708114.925, '0.00526214553236979'],
- [1495708174.925, '0.0044803300000000644'],
- [1495708234.925, '0.005421443333333592'],
- [1495708294.925, '0.005694326244512144'],
- [1495708354.925, '0.005527721904761457'],
- [1495708414.925, '0.005988819523809819'],
- [1495708474.925, '0.005484704285714448'],
- [1495708534.925, '0.005041123649230085'],
- [1495708594.925, '0.005717767639612059'],
- [1495708654.925, '0.005412954417342863'],
- [1495708714.925, '0.005833343333333254'],
- [1495708774.925, '0.005448135238094969'],
- [1495708834.925, '0.005117341428571432'],
- [1495708894.925, '0.005888345825277833'],
- [1495708954.925, '0.005398543809524135'],
- [1495709014.925, '0.005325611428571416'],
- [1495709074.925, '0.005848668571428527'],
- [1495709134.925, '0.005135003105145044'],
- [1495709194.925, '0.0054551400000003'],
- [1495709254.925, '0.005319472937322171'],
- [1495709314.925, '0.00585677857142792'],
- [1495709374.925, '0.0062146261904759215'],
- [1495709434.925, '0.0067105060904182265'],
- [1495709494.925, '0.005829691904762108'],
- [1495709554.925, '0.005719280952381261'],
- [1495709614.925, '0.005682603793416407'],
- [1495709674.925, '0.0055272846277326934'],
- [1495709734.925, '0.0057123680952386735'],
- [1495709794.925, '0.00520597958075818'],
- [1495709854.925, '0.005584358957263837'],
- [1495709914.925, '0.005601104275197466'],
- [1495709974.925, '0.005991657142857066'],
- [1495710034.925, '0.00553722238095218'],
- [1495710094.925, '0.005127883122696293'],
- [1495710154.925, '0.005498111927534584'],
- [1495710214.925, '0.005609934069084202'],
- [1495710274.925, '0.00459206285714307'],
- [1495710334.925, '0.0047910828571428084'],
- [1495710394.925, '0.0056014671288845685'],
- [1495710454.925, '0.005686936791078528'],
- [1495710514.925, '0.00444480476190448'],
- [1495710574.925, '0.005780394696738921'],
- [1495710634.925, '0.0053107227550210365'],
- [1495710694.925, '0.005096031495761817'],
- [1495710754.925, '0.005451377979091524'],
- [1495710814.925, '0.005328136666667083'],
- [1495710874.925, '0.006020612857143043'],
- [1495710934.925, '0.0061063585714285365'],
- [1495710994.925, '0.006018346015752312'],
- [1495711054.925, '0.005069130952381193'],
- [1495711114.925, '0.005458406190476052'],
- [1495711174.925, '0.00577219190476179'],
- [1495711234.925, '0.005760814645658314'],
- [1495711294.925, '0.005371875716579101'],
- [1495711354.925, '0.0064232666666665834'],
- [1495711414.925, '0.009369806836906667'],
- [1495711474.925, '0.008956864761904692'],
- [1495711534.925, '0.005266849368559271'],
- [1495711594.925, '0.005335111364934262'],
- [1495711654.925, '0.006461778319586945'],
- [1495711714.925, '0.004687939890762393'],
- [1495711774.925, '0.004438831245760684'],
- [1495711834.925, '0.005142786666666613'],
- [1495711894.925, '0.007257734212054963'],
- [1495711954.925, '0.005621991904761494'],
- [1495712014.925, '0.007868689999999862'],
- [1495712074.925, '0.00910970215275738'],
- [1495712134.925, '0.006151004285714278'],
- [1495712194.925, '0.005447120924961522'],
- [1495712254.925, '0.005150705153929503'],
- [1495712314.925, '0.006358108714969314'],
- [1495712374.925, '0.0057725354795696475'],
- [1495712434.925, '0.005232139047619015'],
- [1495712494.925, '0.004932809617949037'],
- [1495712554.925, '0.004511607508499662'],
- [1495712614.925, '0.00440487701522666'],
- [1495712674.925, '0.005479113333333174'],
- [1495712734.925, '0.004726317619047547'],
- [1495712794.925, '0.005582041102958029'],
- [1495712854.925, '0.006381481216082099'],
- [1495712914.925, '0.005474260014095208'],
- [1495712974.925, '0.00567597142857188'],
- [1495713034.925, '0.0064741233333332985'],
- [1495713094.925, '0.005467475714285271'],
- [1495713154.925, '0.004868648393824457'],
- [1495713214.925, '0.005254923286444893'],
- [1495713274.925, '0.005599217150312865'],
- [1495713334.925, '0.005105413720618919'],
- [1495713394.925, '0.007246073333333279'],
- [1495713454.925, '0.005990312380952272'],
- [1495713514.925, '0.005594601853351101'],
- [1495713574.925, '0.004739258673727054'],
- [1495713634.925, '0.003932121428571783'],
- [1495713694.925, '0.005018188268459395'],
- [1495713754.925, '0.004538238095237985'],
- [1495713814.925, '0.00561816643265435'],
- [1495713874.925, '0.0063132584495033586'],
- [1495713934.925, '0.00442385238095213'],
- [1495713994.925, '0.004181795887658453'],
- [1495714054.925, '0.004437759047619037'],
- [1495714114.925, '0.006421748157178241'],
- [1495714174.925, '0.006525143809523842'],
- [1495714234.925, '0.004715904935144247'],
- [1495714294.925, '0.005966040152763461'],
- [1495714354.925, '0.005614535466921674'],
- [1495714414.925, '0.004934375119415906'],
- [1495714474.925, '0.0054122933333327385'],
- [1495714534.925, '0.004926540699612279'],
- [1495714594.925, '0.006124649517134237'],
- [1495714654.925, '0.004629427092013995'],
- [1495714714.925, '0.005117951257607005'],
- [1495714774.925, '0.004868774512685422'],
- [1495714834.925, '0.005310093333333399'],
- [1495714894.925, '0.0054907752286127345'],
- [1495714954.925, '0.004597678117351089'],
- [1495715014.925, '0.0059622552380952'],
- [1495715074.925, '0.005352457072655368'],
- [1495715134.925, '0.005491630952381143'],
- [1495715194.925, '0.006391770078379791'],
- [1495715254.925, '0.005933472857142518'],
- [1495715314.925, '0.005301314285714163'],
- [1495715374.925, '0.0058352959724814165'],
- [1495715434.925, '0.006154755147867044'],
- [1495715494.925, '0.009391935637482038'],
- [1495715554.925, '0.007846462857142592'],
- [1495715614.925, '0.00477608215316353'],
- [1495715674.925, '0.006132865238094998'],
- [1495715734.925, '0.006159762457649516'],
- [1495715794.925, '0.005957307073265968'],
- [1495715854.925, '0.006652319091792501'],
- [1495715914.925, '0.005493557402895287'],
- [1495715974.925, '0.0058652434829145166'],
- [1495716034.925, '0.005627400430468021'],
- [1495716094.925, '0.006240656190475609'],
- [1495716154.925, '0.006305997676168624'],
- [1495716214.925, '0.005388057732783248'],
- [1495716274.925, '0.0052814916048421244'],
- [1495716334.925, '0.00699498614272497'],
- [1495716394.925, '0.00627768693035141'],
- [1495716454.925, '0.0042411487048161145'],
- [1495716514.925, '0.005348647473627653'],
- [1495716574.925, '0.0047176657142853975'],
- [1495716634.925, '0.004437898571428686'],
- [1495716694.925, '0.004923527366927261'],
- [1495716754.925, '0.005131935066048421'],
- [1495716814.925, '0.005046949523809611'],
- [1495716874.925, '0.00547184095238092'],
- [1495716934.925, '0.005224140016380444'],
- [1495716994.925, '0.005297991171665292'],
- [1495717054.925, '0.005492965995623498'],
- [1495717114.925, '0.005754660000000403'],
- [1495717174.925, '0.005949557138639285'],
- [1495717234.925, '0.006091816112534666'],
- [1495717294.925, '0.005554210080192063'],
- [1495717354.925, '0.006411504395279871'],
- [1495717414.925, '0.006319643996609606'],
- [1495717474.925, '0.005539174405717675'],
- [1495717534.925, '0.0053157078842772255'],
- [1495717594.925, '0.005247480952381066'],
- [1495717654.925, '0.004820141620396252'],
- [1495717714.925, '0.005906173868322844'],
- [1495717774.925, '0.006173117219570961'],
- [1495717834.925, '0.005963340952380661'],
- [1495717894.925, '0.005698976627681527'],
- [1495717954.925, '0.004751279096346378'],
- [1495718014.925, '0.005733142379359711'],
- [1495718074.925, '0.004831689010348035'],
- [1495718134.925, '0.005188370476191092'],
- [1495718194.925, '0.004793227554547938'],
- [1495718254.925, '0.003997442857142731'],
- [1495718314.925, '0.004386040132951264'],
- ],
- },
- ],
- },
- ],
- },
- ],
- },
+export const mockApiEndpoint = `${gl.TEST_HOST}/monitoring/mock`;
+
+export const mockedQueryResultPayload = {
+ metricId: '17_system_metrics_kubernetes_container_memory_average',
+ result: [
{
- group: 'NGINX',
- priority: 2,
- metrics: [
- {
- id: 100,
- title: 'Http Error Rate',
- weight: 100,
- queries: [
- {
- query_range:
- 'sum(rate(nginx_upstream_responses_total{status_code="5xx", upstream=~"nginx-test-8691397-production-.*"}[2m])) / sum(rate(nginx_upstream_responses_total{upstream=~"nginx-test-8691397-production-.*"}[2m])) * 100',
- label: '5xx errors',
- unit: '%',
- result: [
- {
- metric: {},
- values: [
- [1495700554.925, NaN],
- [1495700614.925, NaN],
- [1495700674.925, NaN],
- [1495700734.925, NaN],
- [1495700794.925, NaN],
- [1495700854.925, NaN],
- [1495700914.925, NaN],
- ],
- },
- ],
- },
- ],
- },
+ metric: {},
+ values: [
+ [1563272065.589, '10.396484375'],
+ [1563272125.589, '10.333984375'],
+ [1563272185.589, '10.333984375'],
+ [1563272245.589, '10.333984375'],
+ [1563272305.589, '10.333984375'],
+ [1563272365.589, '10.333984375'],
+ [1563272425.589, '10.38671875'],
+ [1563272485.589, '10.333984375'],
+ [1563272545.589, '10.333984375'],
+ [1563272605.589, '10.333984375'],
+ [1563272665.589, '10.333984375'],
+ [1563272725.589, '10.333984375'],
+ [1563272785.589, '10.396484375'],
+ [1563272845.589, '10.333984375'],
+ [1563272905.589, '10.333984375'],
+ [1563272965.589, '10.3984375'],
+ [1563273025.589, '10.337890625'],
+ [1563273085.589, '10.34765625'],
+ [1563273145.589, '10.337890625'],
+ [1563273205.589, '10.337890625'],
+ [1563273265.589, '10.337890625'],
+ [1563273325.589, '10.337890625'],
+ [1563273385.589, '10.337890625'],
+ [1563273445.589, '10.337890625'],
+ [1563273505.589, '10.337890625'],
+ [1563273565.589, '10.337890625'],
+ [1563273625.589, '10.337890625'],
+ [1563273685.589, '10.337890625'],
+ [1563273745.589, '10.337890625'],
+ [1563273805.589, '10.337890625'],
+ [1563273865.589, '10.390625'],
+ [1563273925.589, '10.390625'],
],
},
],
- last_update: '2017-05-25T13:18:34.949Z',
};
-export const singleGroupResponse = [
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- metrics: [
- {
- title: 'Memory Usage (Total)',
- weight: 0,
- y_label: 'Total Memory Used',
- queries: [
- {
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^production-(.*)",namespace="autodevops-deploy-33"}) by (job)) without (job) /1024/1024/1024',
- unit: 'GB',
- label: 'Total',
- result: [
- {
- metric: {},
- values: [
- [1558453960.079, '0.0357666015625'],
- [1558454020.079, '0.035675048828125'],
- [1558454080.079, '0.035152435302734375'],
- [1558454140.079, '0.035221099853515625'],
- [1558454200.079, '0.0352325439453125'],
- [1558454260.079, '0.03479766845703125'],
- [1558454320.079, '0.034793853759765625'],
- [1558454380.079, '0.034931182861328125'],
- [1558454440.079, '0.034816741943359375'],
- [1558454500.079, '0.034816741943359375'],
- [1558454560.079, '0.034816741943359375'],
- ],
- },
- ],
- },
- ],
- id: 15,
- },
- ],
- },
-];
-
-export default metricsGroupsAPIResponse;
-
-export const deploymentData = [
- {
- id: 111,
- iid: 3,
- sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
- commitUrl:
- 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
- ref: {
- name: 'master',
- },
- created_at: '2017-05-31T21:23:37.881Z',
- tag: false,
- tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false',
- 'last?': true,
- },
- {
- id: 110,
- iid: 2,
- sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
- commitUrl:
- 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
- ref: {
- name: 'master',
- },
- created_at: '2017-05-30T20:08:04.629Z',
- tag: false,
- tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false',
- 'last?': false,
- },
- {
- id: 109,
- iid: 1,
- sha: '6511e58faafaa7ad2228990ec57f19d66f7db7c2',
- commitUrl:
- 'http://test.host/frontend-fixtures/environments-project/commit/6511e58faafaa7ad2228990ec57f19d66f7db7c2',
- ref: {
- name: 'update2-readme',
- },
- created_at: '2017-05-30T17:42:38.409Z',
- tag: false,
- tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false',
- 'last?': false,
- },
-];
-
-export const statePaths = {
- settingsPath: '/root/hello-prometheus/services/prometheus/edit',
- clustersPath: '/root/hello-prometheus/clusters',
- documentationPath: '/help/administration/monitoring/prometheus/index.md',
-};
-
-export const queryWithoutData = {
- title: 'HTTP Error rate',
- weight: 10,
- y_label: 'Http Error Rate',
- queries: [
+export const mockedQueryResultPayloadCoresTotal = {
+ metricId: '13_system_metrics_kubernetes_container_cores_total',
+ result: [
{
- query_range:
- 'sum(rate(nginx_upstream_responses_total{status_code="5xx", upstream=~"nginx-test-8691397-production-.*"}[2m])) / sum(rate(nginx_upstream_responses_total{upstream=~"nginx-test-8691397-production-.*"}[2m])) * 100',
- label: '5xx errors',
- unit: '%',
- result: [],
+ metric: {},
+ values: [
+ [1563272065.589, '9.396484375'],
+ [1563272125.589, '9.333984375'],
+ [1563272185.589, '9.333984375'],
+ [1563272245.589, '9.333984375'],
+ [1563272305.589, '9.333984375'],
+ [1563272365.589, '9.333984375'],
+ [1563272425.589, '9.38671875'],
+ [1563272485.589, '9.333984375'],
+ [1563272545.589, '9.333984375'],
+ [1563272605.589, '9.333984375'],
+ [1563272665.589, '9.333984375'],
+ [1563272725.589, '9.333984375'],
+ [1563272785.589, '9.396484375'],
+ [1563272845.589, '9.333984375'],
+ [1563272905.589, '9.333984375'],
+ [1563272965.589, '9.3984375'],
+ [1563273025.589, '9.337890625'],
+ [1563273085.589, '9.34765625'],
+ [1563273145.589, '9.337890625'],
+ [1563273205.589, '9.337890625'],
+ [1563273265.589, '9.337890625'],
+ [1563273325.589, '9.337890625'],
+ [1563273385.589, '9.337890625'],
+ [1563273445.589, '9.337890625'],
+ [1563273505.589, '9.337890625'],
+ [1563273565.589, '9.337890625'],
+ [1563273625.589, '9.337890625'],
+ [1563273685.589, '9.337890625'],
+ [1563273745.589, '9.337890625'],
+ [1563273805.589, '9.337890625'],
+ [1563273865.589, '9.390625'],
+ [1563273925.589, '9.390625'],
+ ],
},
],
};
-export function convertDatesMultipleSeries(multipleSeries) {
- const convertedMultiple = multipleSeries;
- multipleSeries.forEach((column, index) => {
- let convertedResult = [];
- convertedResult = column.queries[0].result.map(resultObj => {
- const convertedMetrics = {};
- convertedMetrics.values = resultObj.values.map(val => ({
- time: new Date(val.time),
- value: val.value,
- }));
- convertedMetrics.metric = resultObj.metric;
- return convertedMetrics;
- });
- convertedMultiple[index].queries[0].result = convertedResult;
- });
- return convertedMultiple;
-}
-
-export const environmentData = [
- {
- id: 34,
- name: 'production',
- state: 'available',
- external_url: 'http://root-autodevops-deploy.my-fake-domain.com',
- environment_type: null,
- stop_action: false,
- metrics_path: '/root/hello-prometheus/environments/34/metrics',
- environment_path: '/root/hello-prometheus/environments/34',
- stop_path: '/root/hello-prometheus/environments/34/stop',
- terminal_path: '/root/hello-prometheus/environments/34/terminal',
- folder_path: '/root/hello-prometheus/environments/folders/production',
- created_at: '2018-06-29T16:53:38.301Z',
- updated_at: '2018-06-29T16:57:09.825Z',
- last_deployment: {
- id: 127,
- },
- },
- {
- id: 35,
- name: 'review/noop-branch',
- state: 'available',
- external_url: 'http://root-autodevops-deploy-review-noop-branc-die93w.my-fake-domain.com',
- environment_type: 'review',
- stop_action: true,
- metrics_path: '/root/hello-prometheus/environments/35/metrics',
- environment_path: '/root/hello-prometheus/environments/35',
- stop_path: '/root/hello-prometheus/environments/35/stop',
- terminal_path: '/root/hello-prometheus/environments/35/terminal',
- folder_path: '/root/hello-prometheus/environments/folders/review',
- created_at: '2018-07-03T18:39:41.702Z',
- updated_at: '2018-07-03T18:44:54.010Z',
- last_deployment: {
- id: 128,
- },
- },
- {
- id: 36,
- name: 'no-deployment/noop-branch',
- state: 'available',
- created_at: '2018-07-04T18:39:41.702Z',
- updated_at: '2018-07-04T18:44:54.010Z',
- },
-];
-
-export const metricsDashboardResponse = {
- dashboard: {
- dashboard: 'Environment metrics',
- priority: 1,
- panel_groups: [
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Memory Used',
- weight: 4,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_total',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
- label: 'Total',
- unit: 'GB',
- metric_id: 12,
- prometheus_endpoint_path: 'http://test',
- },
- ],
- },
- {
- title: 'Core Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Cores',
- weight: 3,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_cores_total',
- query_range:
- 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
- label: 'Total',
- unit: 'cores',
- metric_id: 13,
- },
- ],
- },
- {
- title: 'Memory Usage (Pod average)',
- type: 'line-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 14,
- },
- ],
- },
- ],
- },
- ],
- },
- status: 'success',
-};
-
-export const dashboardGitResponse = [
- {
- path: 'config/prometheus/common_metrics.yml',
- display_name: 'Common Metrics',
- default: true,
- },
- {
- path: '.gitlab/dashboards/super.yml',
- display_name: 'Custom Dashboard 1',
- default: false,
- },
-];
-
export const graphDataPrometheusQuery = {
title: 'Super Chart A2',
type: 'single-stat',
@@ -975,7 +135,7 @@ export const graphDataPrometheusQuery = {
export const graphDataPrometheusQueryRange = {
title: 'Super Chart A1',
- type: 'area',
+ type: 'area-chart',
weight: 2,
metrics: [
{
@@ -991,7 +151,7 @@ export const graphDataPrometheusQueryRange = {
],
queries: [
{
- metricId: null,
+ metricId: '10',
id: 'metric_a1',
metric_id: 2,
query_range:
@@ -1009,3 +169,82 @@ export const graphDataPrometheusQueryRange = {
},
],
};
+
+export const graphDataPrometheusQueryRangeMultiTrack = {
+ title: 'Super Chart A3',
+ type: 'heatmap',
+ weight: 3,
+ x_label: 'Status Code',
+ y_label: 'Time',
+ metrics: [],
+ queries: [
+ {
+ metricId: '1',
+ id: 'response_metrics_nginx_ingress_throughput_status_code',
+ query_range:
+ 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[60m])) by (status_code)',
+ unit: 'req / sec',
+ label: 'Status Code',
+ metric_id: 1,
+ prometheus_endpoint_path:
+ '/root/rails_nodb/environments/3/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
+ result: [
+ {
+ metric: { status_code: '1xx' },
+ values: [
+ ['2019-08-30T15:00:00.000Z', 0],
+ ['2019-08-30T16:00:00.000Z', 2],
+ ['2019-08-30T17:00:00.000Z', 0],
+ ['2019-08-30T18:00:00.000Z', 0],
+ ['2019-08-30T19:00:00.000Z', 0],
+ ['2019-08-30T20:00:00.000Z', 3],
+ ],
+ },
+ {
+ metric: { status_code: '2xx' },
+ values: [
+ ['2019-08-30T15:00:00.000Z', 1],
+ ['2019-08-30T16:00:00.000Z', 3],
+ ['2019-08-30T17:00:00.000Z', 6],
+ ['2019-08-30T18:00:00.000Z', 10],
+ ['2019-08-30T19:00:00.000Z', 8],
+ ['2019-08-30T20:00:00.000Z', 6],
+ ],
+ },
+ {
+ metric: { status_code: '3xx' },
+ values: [
+ ['2019-08-30T15:00:00.000Z', 1],
+ ['2019-08-30T16:00:00.000Z', 2],
+ ['2019-08-30T17:00:00.000Z', 3],
+ ['2019-08-30T18:00:00.000Z', 3],
+ ['2019-08-30T19:00:00.000Z', 2],
+ ['2019-08-30T20:00:00.000Z', 1],
+ ],
+ },
+ {
+ metric: { status_code: '4xx' },
+ values: [
+ ['2019-08-30T15:00:00.000Z', 2],
+ ['2019-08-30T16:00:00.000Z', 0],
+ ['2019-08-30T17:00:00.000Z', 0],
+ ['2019-08-30T18:00:00.000Z', 2],
+ ['2019-08-30T19:00:00.000Z', 0],
+ ['2019-08-30T20:00:00.000Z', 2],
+ ],
+ },
+ {
+ metric: { status_code: '5xx' },
+ values: [
+ ['2019-08-30T15:00:00.000Z', 0],
+ ['2019-08-30T16:00:00.000Z', 1],
+ ['2019-08-30T17:00:00.000Z', 0],
+ ['2019-08-30T18:00:00.000Z', 0],
+ ['2019-08-30T19:00:00.000Z', 0],
+ ['2019-08-30T20:00:00.000Z', 2],
+ ],
+ },
+ ],
+ },
+ ],
+};
diff --git a/spec/javascripts/monitoring/panel_type_spec.js b/spec/javascripts/monitoring/panel_type_spec.js
deleted file mode 100644
index a2366e74d43..00000000000
--- a/spec/javascripts/monitoring/panel_type_spec.js
+++ /dev/null
@@ -1,79 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import PanelType from '~/monitoring/components/panel_type.vue';
-import EmptyChart from '~/monitoring/components/charts/empty_chart.vue';
-import TimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
-import { graphDataPrometheusQueryRange } from './mock_data';
-import { createStore } from '~/monitoring/stores';
-
-describe('Panel Type component', () => {
- let store;
- let panelType;
- const dashboardWidth = 100;
-
- describe('When no graphData is available', () => {
- let glEmptyChart;
- // Deep clone object before modifying
- const graphDataNoResult = JSON.parse(JSON.stringify(graphDataPrometheusQueryRange));
- graphDataNoResult.queries[0].result = [];
-
- beforeEach(() => {
- panelType = shallowMount(PanelType, {
- propsData: {
- clipboardText: 'dashboard_link',
- dashboardWidth,
- graphData: graphDataNoResult,
- },
- });
- });
-
- afterEach(() => {
- panelType.destroy();
- });
-
- describe('Empty Chart component', () => {
- beforeEach(() => {
- glEmptyChart = panelType.find(EmptyChart);
- });
-
- it('is a Vue instance', () => {
- expect(glEmptyChart.isVueInstance()).toBe(true);
- });
-
- it('it receives a graph title', () => {
- const props = glEmptyChart.props();
-
- expect(props.graphTitle).toBe(panelType.vm.graphData.title);
- });
- });
- });
-
- describe('when Graph data is available', () => {
- const exampleText = 'example_text';
-
- beforeEach(() => {
- store = createStore();
- panelType = shallowMount(PanelType, {
- propsData: {
- clipboardText: exampleText,
- dashboardWidth,
- graphData: graphDataPrometheusQueryRange,
- },
- store,
- });
- });
-
- describe('Time Series Chart panel type', () => {
- it('is rendered', () => {
- expect(panelType.find(TimeSeriesChart).isVueInstance()).toBe(true);
- expect(panelType.find(TimeSeriesChart).exists()).toBe(true);
- });
-
- it('sets clipboard text on the dropdown', () => {
- const link = () => panelType.find('.js-chart-link');
- const clipboardText = () => link().element.dataset.clipboardText;
-
- expect(clipboardText()).toBe(exampleText);
- });
- });
- });
-});
diff --git a/spec/javascripts/monitoring/shared/prometheus_header_spec.js b/spec/javascripts/monitoring/shared/prometheus_header_spec.js
new file mode 100644
index 00000000000..9f916a4dfbb
--- /dev/null
+++ b/spec/javascripts/monitoring/shared/prometheus_header_spec.js
@@ -0,0 +1,26 @@
+import { shallowMount } from '@vue/test-utils';
+import PrometheusHeader from '~/monitoring/components/shared/prometheus_header.vue';
+
+describe('Prometheus Header component', () => {
+ let prometheusHeader;
+
+ beforeEach(() => {
+ prometheusHeader = shallowMount(PrometheusHeader, {
+ propsData: {
+ graphTitle: 'graph header',
+ },
+ });
+ });
+
+ afterEach(() => {
+ prometheusHeader.destroy();
+ });
+
+ describe('Prometheus header component', () => {
+ it('should show a title', () => {
+ const title = prometheusHeader.vm.$el.querySelector('.js-graph-title').textContent;
+
+ expect(title).toBe('graph header');
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/utils_spec.js b/spec/javascripts/monitoring/utils_spec.js
index 512dd2a0eb3..202b4ec8f2e 100644
--- a/spec/javascripts/monitoring/utils_spec.js
+++ b/spec/javascripts/monitoring/utils_spec.js
@@ -7,9 +7,14 @@ import {
stringToISODate,
ISODateToString,
isValidDate,
+ graphDataValidatorForAnomalyValues,
} from '~/monitoring/utils';
import { timeWindows, timeWindowsKeyNames } from '~/monitoring/constants';
-import { graphDataPrometheusQuery, graphDataPrometheusQueryRange } from './mock_data';
+import {
+ graphDataPrometheusQuery,
+ graphDataPrometheusQueryRange,
+ anomalyMockGraphData,
+} from './mock_data';
describe('getTimeDiff', () => {
function secondsBetween({ start, end }) {
@@ -307,3 +312,34 @@ describe('isDateTimePickerInputValid', () => {
});
});
});
+
+describe('graphDataValidatorForAnomalyValues', () => {
+ let oneQuery;
+ let threeQueries;
+ let fourQueries;
+ beforeEach(() => {
+ oneQuery = graphDataPrometheusQuery;
+ threeQueries = anomalyMockGraphData;
+
+ const queries = [...threeQueries.queries];
+ queries.push(threeQueries.queries[0]);
+ fourQueries = {
+ ...anomalyMockGraphData,
+ queries,
+ };
+ });
+ /*
+ * Anomaly charts can accept results for exactly 3 queries,
+ */
+ it('validates passes with the right query format', () => {
+ expect(graphDataValidatorForAnomalyValues(threeQueries)).toBe(true);
+ });
+
+ it('validation fails for wrong format, 1 metric', () => {
+ expect(graphDataValidatorForAnomalyValues(oneQuery)).toBe(false);
+ });
+
+ it('validation fails for wrong format, more than 3 metrics', () => {
+ expect(graphDataValidatorForAnomalyValues(fourQueries)).toBe(false);
+ });
+});
diff --git a/spec/javascripts/notes/components/comment_form_spec.js b/spec/javascripts/notes/components/comment_form_spec.js
deleted file mode 100644
index 88c86746992..00000000000
--- a/spec/javascripts/notes/components/comment_form_spec.js
+++ /dev/null
@@ -1,301 +0,0 @@
-import $ from 'jquery';
-import Vue from 'vue';
-import Autosize from 'autosize';
-import createStore from '~/notes/stores';
-import CommentForm from '~/notes/components/comment_form.vue';
-import * as constants from '~/notes/constants';
-import { loggedOutnoteableData, notesDataMock, userDataMock, noteableDataMock } from '../mock_data';
-import { keyboardDownEvent } from '../../issue_show/helpers';
-
-describe('issue_comment_form component', () => {
- let store;
- let vm;
- const Component = Vue.extend(CommentForm);
- let mountComponent;
-
- beforeEach(() => {
- store = createStore();
- mountComponent = (noteableType = 'issue') =>
- new Component({
- propsData: {
- noteableType,
- },
- store,
- }).$mount();
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('user is logged in', () => {
- beforeEach(() => {
- store.dispatch('setUserData', userDataMock);
- store.dispatch('setNoteableData', noteableDataMock);
- store.dispatch('setNotesData', notesDataMock);
-
- vm = mountComponent();
- });
-
- it('should render user avatar with link', () => {
- expect(vm.$el.querySelector('.timeline-icon .user-avatar-link').getAttribute('href')).toEqual(
- userDataMock.path,
- );
- });
-
- describe('handleSave', () => {
- it('should request to save note when note is entered', () => {
- vm.note = 'hello world';
- spyOn(vm, 'saveNote').and.returnValue(new Promise(() => {}));
- spyOn(vm, 'resizeTextarea');
- spyOn(vm, 'stopPolling');
-
- vm.handleSave();
-
- expect(vm.isSubmitting).toEqual(true);
- expect(vm.note).toEqual('');
- expect(vm.saveNote).toHaveBeenCalled();
- expect(vm.stopPolling).toHaveBeenCalled();
- expect(vm.resizeTextarea).toHaveBeenCalled();
- });
-
- it('should toggle issue state when no note', () => {
- spyOn(vm, 'toggleIssueState');
-
- vm.handleSave();
-
- expect(vm.toggleIssueState).toHaveBeenCalled();
- });
-
- it('should disable action button whilst submitting', done => {
- const saveNotePromise = Promise.resolve();
- vm.note = 'hello world';
- spyOn(vm, 'saveNote').and.returnValue(saveNotePromise);
- spyOn(vm, 'stopPolling');
-
- const actionButton = vm.$el.querySelector('.js-action-button');
-
- vm.handleSave();
-
- Vue.nextTick()
- .then(() => {
- expect(actionButton.disabled).toBeTruthy();
- })
- .then(saveNotePromise)
- .then(Vue.nextTick)
- .then(() => {
- expect(actionButton.disabled).toBeFalsy();
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('textarea', () => {
- it('should render textarea with placeholder', () => {
- expect(
- vm.$el.querySelector('.js-main-target-form textarea').getAttribute('placeholder'),
- ).toEqual('Write a comment or drag your files here…');
- });
-
- it('should make textarea disabled while requesting', done => {
- const $submitButton = $(vm.$el.querySelector('.js-comment-submit-button'));
- vm.note = 'hello world';
- spyOn(vm, 'stopPolling');
- spyOn(vm, 'saveNote').and.returnValue(new Promise(() => {}));
-
- vm.$nextTick(() => {
- // Wait for vm.note change triggered. It should enable $submitButton.
- $submitButton.trigger('click');
-
- vm.$nextTick(() => {
- // Wait for vm.isSubmitting triggered. It should disable textarea.
- expect(vm.$el.querySelector('.js-main-target-form textarea').disabled).toBeTruthy();
- done();
- });
- });
- });
-
- it('should support quick actions', () => {
- expect(
- vm.$el
- .querySelector('.js-main-target-form textarea')
- .getAttribute('data-supports-quick-actions'),
- ).toEqual('true');
- });
-
- it('should link to markdown docs', () => {
- const { markdownDocsPath } = notesDataMock;
-
- expect(vm.$el.querySelector(`a[href="${markdownDocsPath}"]`).textContent.trim()).toEqual(
- 'Markdown',
- );
- });
-
- it('should link to quick actions docs', () => {
- const { quickActionsDocsPath } = notesDataMock;
-
- expect(
- vm.$el.querySelector(`a[href="${quickActionsDocsPath}"]`).textContent.trim(),
- ).toEqual('quick actions');
- });
-
- it('should resize textarea after note discarded', done => {
- spyOn(Autosize, 'update');
- spyOn(vm, 'discard').and.callThrough();
-
- vm.note = 'foo';
- vm.discard();
-
- Vue.nextTick(() => {
- expect(Autosize.update).toHaveBeenCalled();
- done();
- });
- });
-
- describe('edit mode', () => {
- it('should enter edit mode when arrow up is pressed', () => {
- spyOn(vm, 'editCurrentUserLastNote').and.callThrough();
- vm.$el.querySelector('.js-main-target-form textarea').value = 'Foo';
- vm.$el
- .querySelector('.js-main-target-form textarea')
- .dispatchEvent(keyboardDownEvent(38, true));
-
- expect(vm.editCurrentUserLastNote).toHaveBeenCalled();
- });
-
- it('inits autosave', () => {
- expect(vm.autosave).toBeDefined();
- expect(vm.autosave.key).toEqual(`autosave/Note/Issue/${noteableDataMock.id}`);
- });
- });
-
- describe('event enter', () => {
- it('should save note when cmd+enter is pressed', () => {
- spyOn(vm, 'handleSave').and.callThrough();
- vm.$el.querySelector('.js-main-target-form textarea').value = 'Foo';
- vm.$el
- .querySelector('.js-main-target-form textarea')
- .dispatchEvent(keyboardDownEvent(13, true));
-
- expect(vm.handleSave).toHaveBeenCalled();
- });
-
- it('should save note when ctrl+enter is pressed', () => {
- spyOn(vm, 'handleSave').and.callThrough();
- vm.$el.querySelector('.js-main-target-form textarea').value = 'Foo';
- vm.$el
- .querySelector('.js-main-target-form textarea')
- .dispatchEvent(keyboardDownEvent(13, false, true));
-
- expect(vm.handleSave).toHaveBeenCalled();
- });
- });
- });
-
- describe('actions', () => {
- it('should be possible to close the issue', () => {
- expect(vm.$el.querySelector('.btn-comment-and-close').textContent.trim()).toEqual(
- 'Close issue',
- );
- });
-
- it('should render comment button as disabled', () => {
- expect(vm.$el.querySelector('.js-comment-submit-button').getAttribute('disabled')).toEqual(
- 'disabled',
- );
- });
-
- it('should enable comment button if it has note', done => {
- vm.note = 'Foo';
- Vue.nextTick(() => {
- expect(
- vm.$el.querySelector('.js-comment-submit-button').getAttribute('disabled'),
- ).toEqual(null);
- done();
- });
- });
-
- it('should update buttons texts when it has note', done => {
- vm.note = 'Foo';
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.btn-comment-and-close').textContent.trim()).toEqual(
- 'Comment & close issue',
- );
-
- done();
- });
- });
-
- it('updates button text with noteable type', done => {
- vm.noteableType = constants.MERGE_REQUEST_NOTEABLE_TYPE;
-
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.btn-comment-and-close').textContent.trim()).toEqual(
- 'Close merge request',
- );
- done();
- });
- });
-
- describe('when clicking close/reopen button', () => {
- it('should disable button and show a loading spinner', done => {
- const toggleStateButton = vm.$el.querySelector('.js-action-button');
-
- toggleStateButton.click();
- Vue.nextTick(() => {
- expect(toggleStateButton.disabled).toEqual(true);
- expect(toggleStateButton.querySelector('.js-loading-button-icon')).not.toBeNull();
-
- done();
- });
- });
- });
-
- describe('when toggling state', () => {
- it('should update MR count', done => {
- spyOn(vm, 'closeIssue').and.returnValue(Promise.resolve());
-
- const updateMrCountSpy = spyOnDependency(CommentForm, 'refreshUserMergeRequestCounts');
- vm.toggleIssueState();
-
- Vue.nextTick(() => {
- expect(updateMrCountSpy).toHaveBeenCalled();
-
- done();
- });
- });
- });
- });
-
- describe('issue is confidential', () => {
- it('shows information warning', done => {
- store.dispatch('setNoteableData', Object.assign(noteableDataMock, { confidential: true }));
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.confidential-issue-warning')).toBeDefined();
- done();
- });
- });
- });
- });
-
- describe('user is not logged in', () => {
- beforeEach(() => {
- store.dispatch('setUserData', null);
- store.dispatch('setNoteableData', loggedOutnoteableData);
- store.dispatch('setNotesData', notesDataMock);
-
- vm = mountComponent();
- });
-
- it('should render signed out widget', () => {
- expect(vm.$el.textContent.replace(/\s+/g, ' ').trim()).toEqual(
- 'Please register or sign in to reply',
- );
- });
-
- it('should not render submission form', () => {
- expect(vm.$el.querySelector('textarea')).toEqual(null);
- });
- });
-});
diff --git a/spec/javascripts/notes/components/noteable_discussion_spec.js b/spec/javascripts/notes/components/noteable_discussion_spec.js
index ea5c57b8a7c..ea1ed3da112 100644
--- a/spec/javascripts/notes/components/noteable_discussion_spec.js
+++ b/spec/javascripts/notes/components/noteable_discussion_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { mount, createLocalVue } from '@vue/test-utils';
import createStore from '~/notes/stores';
import noteableDiscussion from '~/notes/components/noteable_discussion.vue';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
@@ -23,7 +23,7 @@ describe('noteable_discussion component', () => {
store.dispatch('setNotesData', notesDataMock);
const localVue = createLocalVue();
- wrapper = shallowMount(noteableDiscussion, {
+ wrapper = mount(noteableDiscussion, {
store,
propsData: { discussion: discussionMock },
localVue,
@@ -35,16 +35,6 @@ describe('noteable_discussion component', () => {
wrapper.destroy();
});
- it('should render user avatar', () => {
- const discussion = { ...discussionMock };
- discussion.diff_file = mockDiffFile;
- discussion.diff_discussion = true;
-
- wrapper.setProps({ discussion, renderDiffFile: true });
-
- expect(wrapper.find('.user-avatar-link').exists()).toBe(true);
- });
-
it('should not render thread header for non diff threads', () => {
expect(wrapper.find('.discussion-header').exists()).toBe(false);
});
@@ -134,105 +124,6 @@ describe('noteable_discussion component', () => {
});
});
- describe('action text', () => {
- const commitId = 'razupaltuff';
- const truncatedCommitId = commitId.substr(0, 8);
- let commitElement;
-
- beforeEach(done => {
- store.state.diffs = {
- projectPath: 'something',
- };
-
- wrapper.setProps({
- discussion: {
- ...discussionMock,
- for_commit: true,
- commit_id: commitId,
- diff_discussion: true,
- diff_file: {
- ...mockDiffFile,
- },
- },
- renderDiffFile: true,
- });
-
- wrapper.vm
- .$nextTick()
- .then(() => {
- commitElement = wrapper.find('.commit-sha');
- })
- .then(done)
- .catch(done.fail);
- });
-
- describe('for commit threads', () => {
- it('should display a monospace started a thread on commit', () => {
- expect(wrapper.text()).toContain(`started a thread on commit ${truncatedCommitId}`);
- expect(commitElement.exists()).toBe(true);
- expect(commitElement.text()).toContain(truncatedCommitId);
- });
- });
-
- describe('for diff thread with a commit id', () => {
- it('should display started thread on commit header', done => {
- wrapper.vm.discussion.for_commit = false;
-
- wrapper.vm.$nextTick(() => {
- expect(wrapper.text()).toContain(`started a thread on commit ${truncatedCommitId}`);
-
- expect(commitElement).not.toBe(null);
-
- done();
- });
- });
-
- it('should display outdated change on commit header', done => {
- wrapper.vm.discussion.for_commit = false;
- wrapper.vm.discussion.active = false;
-
- wrapper.vm.$nextTick(() => {
- expect(wrapper.text()).toContain(
- `started a thread on an outdated change in commit ${truncatedCommitId}`,
- );
-
- expect(commitElement).not.toBe(null);
-
- done();
- });
- });
- });
-
- describe('for diff threads without a commit id', () => {
- it('should show started a thread on the diff text', done => {
- Object.assign(wrapper.vm.discussion, {
- for_commit: false,
- commit_id: null,
- });
-
- wrapper.vm.$nextTick(() => {
- expect(wrapper.text()).toContain('started a thread on the diff');
-
- done();
- });
- });
-
- it('should show thread on older version text', done => {
- Object.assign(wrapper.vm.discussion, {
- for_commit: false,
- commit_id: null,
- active: false,
- });
-
- wrapper.vm.$nextTick(() => {
- expect(wrapper.text()).toContain('started a thread on an old version of the diff');
-
- done();
- });
- });
- });
- });
-
describe('for resolved thread', () => {
beforeEach(() => {
const discussion = getJSONFixture(discussionWithTwoUnresolvedNotes)[0];
@@ -262,6 +153,7 @@ describe('noteable_discussion component', () => {
}));
wrapper.setProps({ discussion });
+
wrapper.vm
.$nextTick()
.then(done)
diff --git a/spec/javascripts/notes/mock_data.js b/spec/javascripts/notes/mock_data.js
index dc914ce8355..89e4553092a 100644
--- a/spec/javascripts/notes/mock_data.js
+++ b/spec/javascripts/notes/mock_data.js
@@ -1,1255 +1 @@
-// Copied to ee/spec/frontend/notes/mock_data.js
-
-export const notesDataMock = {
- discussionsPath: '/gitlab-org/gitlab-foss/issues/26/discussions.json',
- lastFetchedAt: 1501862675,
- markdownDocsPath: '/help/user/markdown',
- newSessionPath: '/users/sign_in?redirect_to_referer=yes',
- notesPath: '/gitlab-org/gitlab-foss/noteable/issue/98/notes',
- quickActionsDocsPath: '/help/user/project/quick_actions',
- registerPath: '/users/sign_in?redirect_to_referer=yes#register-pane',
- prerenderedNotesCount: 1,
- closePath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=close',
- reopenPath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=reopen',
- canAwardEmoji: true,
-};
-
-export const userDataMock = {
- avatar_url: 'mock_path',
- id: 1,
- name: 'Root',
- path: '/root',
- state: 'active',
- username: 'root',
-};
-
-export const noteableDataMock = {
- assignees: [],
- author_id: 1,
- branch_name: null,
- confidential: false,
- create_note_path: '/gitlab-org/gitlab-foss/notes?target_id=98&target_type=issue',
- created_at: '2017-02-07T10:11:18.395Z',
- current_user: {
- can_create_note: true,
- can_update: true,
- can_award_emoji: true,
- },
- description: '',
- due_date: null,
- human_time_estimate: null,
- human_total_time_spent: null,
- id: 98,
- iid: 26,
- labels: [],
- lock_version: null,
- milestone: null,
- milestone_id: null,
- moved_to_id: null,
- preview_note_path: '/gitlab-org/gitlab-foss/preview_markdown?target_id=98&target_type=Issue',
- project_id: 2,
- state: 'opened',
- time_estimate: 0,
- title: '14',
- total_time_spent: 0,
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- updated_at: '2017-08-04T09:53:01.226Z',
- updated_by_id: 1,
- web_url: '/gitlab-org/gitlab-foss/issues/26',
- noteableType: 'issue',
-};
-
-export const lastFetchedAt = '1501862675';
-
-export const individualNote = {
- expanded: true,
- id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
- individual_note: true,
- notes: [
- {
- id: '1390',
- attachment: {
- url: null,
- filename: null,
- image: false,
- },
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: 'test',
- path: '/root',
- },
- created_at: '2017-08-01T17: 09: 33.762Z',
- updated_at: '2017-08-01T17: 09: 33.762Z',
- system: false,
- noteable_id: 98,
- noteable_type: 'Issue',
- type: null,
- human_access: 'Owner',
- note: 'sdfdsaf',
- note_html: "<p dir='auto'>sdfdsaf</p>",
- current_user: {
- can_edit: true,
- can_award_emoji: true,
- },
- discussion_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
- emoji_awardable: true,
- award_emoji: [
- { name: 'baseball', user: { id: 1, name: 'Root', username: 'root' } },
- { name: 'art', user: { id: 1, name: 'Root', username: 'root' } },
- ],
- toggle_award_path: '/gitlab-org/gitlab-foss/notes/1390/toggle_award_emoji',
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- note_url: '/group/project/merge_requests/1#note_1',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1390&user_id=1',
- path: '/gitlab-org/gitlab-foss/notes/1390',
- },
- ],
- reply_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
-};
-
-export const note = {
- id: '546',
- attachment: {
- url: null,
- filename: null,
- image: false,
- },
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2017-08-10T15:24:03.087Z',
- updated_at: '2017-08-10T15:24:03.087Z',
- system: false,
- noteable_id: 67,
- noteable_type: 'Issue',
- noteable_iid: 7,
- type: null,
- human_access: 'Owner',
- note: 'Vel id placeat reprehenderit sit numquam.',
- note_html: '<p dir="auto">Vel id placeat reprehenderit sit numquam.</p>',
- current_user: {
- can_edit: true,
- can_award_emoji: true,
- },
- discussion_id: 'd3842a451b7f3d9a5dfce329515127b2d29a4cd0',
- emoji_awardable: true,
- award_emoji: [
- {
- name: 'baseball',
- user: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- },
- },
- {
- name: 'bath_tone3',
- user: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- },
- },
- ],
- toggle_award_path: '/gitlab-org/gitlab-foss/notes/546/toggle_award_emoji',
- note_url: '/group/project/merge_requests/1#note_1',
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_546&user_id=1',
- path: '/gitlab-org/gitlab-foss/notes/546',
-};
-
-export const discussionMock = {
- id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
- reply_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
- expanded: true,
- notes: [
- {
- id: '1395',
- attachment: {
- url: null,
- filename: null,
- image: false,
- },
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- path: '/root',
- },
- created_at: '2017-08-02T10:51:58.559Z',
- updated_at: '2017-08-02T10:51:58.559Z',
- system: false,
- noteable_id: 98,
- noteable_type: 'Issue',
- type: 'DiscussionNote',
- human_access: 'Owner',
- note: 'THIS IS A DICUSSSION!',
- note_html: "<p dir='auto'>THIS IS A DICUSSSION!</p>",
- current_user: {
- can_edit: true,
- can_award_emoji: true,
- can_resolve: true,
- },
- discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
- emoji_awardable: true,
- award_emoji: [],
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- toggle_award_path: '/gitlab-org/gitlab-foss/notes/1395/toggle_award_emoji',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1395&user_id=1',
- path: '/gitlab-org/gitlab-foss/notes/1395',
- },
- {
- id: '1396',
- attachment: {
- url: null,
- filename: null,
- image: false,
- },
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- path: '/root',
- },
- created_at: '2017-08-02T10:56:50.980Z',
- updated_at: '2017-08-03T14:19:35.691Z',
- system: false,
- noteable_id: 98,
- noteable_type: 'Issue',
- type: 'DiscussionNote',
- human_access: 'Owner',
- note: 'sadfasdsdgdsf',
- note_html: "<p dir='auto'>sadfasdsdgdsf</p>",
- last_edited_at: '2017-08-03T14:19:35.691Z',
- last_edited_by: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- path: '/root',
- },
- current_user: {
- can_edit: true,
- can_award_emoji: true,
- can_resolve: true,
- },
- discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
- emoji_awardable: true,
- award_emoji: [],
- toggle_award_path: '/gitlab-org/gitlab-foss/notes/1396/toggle_award_emoji',
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1396&user_id=1',
- path: '/gitlab-org/gitlab-foss/notes/1396',
- },
- {
- id: '1437',
- attachment: {
- url: null,
- filename: null,
- image: false,
- },
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- path: '/root',
- },
- created_at: '2017-08-03T18:11:18.780Z',
- updated_at: '2017-08-04T09:52:31.062Z',
- system: false,
- noteable_id: 98,
- noteable_type: 'Issue',
- type: 'DiscussionNote',
- human_access: 'Owner',
- note: 'adsfasf Should disappear',
- note_html: "<p dir='auto'>adsfasf Should disappear</p>",
- last_edited_at: '2017-08-04T09:52:31.062Z',
- last_edited_by: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- path: '/root',
- },
- current_user: {
- can_edit: true,
- can_award_emoji: true,
- can_resolve: true,
- },
- discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
- emoji_awardable: true,
- award_emoji: [],
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- toggle_award_path: '/gitlab-org/gitlab-foss/notes/1437/toggle_award_emoji',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1437&user_id=1',
- path: '/gitlab-org/gitlab-foss/notes/1437',
- },
- ],
- individual_note: false,
- resolvable: true,
- active: true,
-};
-
-export const loggedOutnoteableData = {
- id: '98',
- iid: 26,
- author_id: 1,
- description: '',
- lock_version: 1,
- milestone_id: null,
- state: 'opened',
- title: 'asdsa',
- updated_by_id: 1,
- created_at: '2017-02-07T10:11:18.395Z',
- updated_at: '2017-08-08T10:22:51.564Z',
- time_estimate: 0,
- total_time_spent: 0,
- human_time_estimate: null,
- human_total_time_spent: null,
- milestone: null,
- labels: [],
- branch_name: null,
- confidential: false,
- assignees: [
- {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- web_url: 'http://localhost:3000/root',
- },
- ],
- due_date: null,
- moved_to_id: null,
- project_id: 2,
- web_url: '/gitlab-org/gitlab-foss/issues/26',
- current_user: {
- can_create_note: false,
- can_update: false,
- },
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- create_note_path: '/gitlab-org/gitlab-foss/notes?target_id=98&target_type=issue',
- preview_note_path: '/gitlab-org/gitlab-foss/preview_markdown?target_id=98&target_type=Issue',
-};
-
-export const collapseNotesMock = [
- {
- expanded: true,
- id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
- individual_note: true,
- notes: [
- {
- id: '1390',
- attachment: null,
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: 'test',
- path: '/root',
- },
- created_at: '2018-02-26T18:07:41.071Z',
- updated_at: '2018-02-26T18:07:41.071Z',
- system: true,
- system_note_icon_name: 'pencil',
- noteable_id: 98,
- noteable_type: 'Issue',
- type: null,
- human_access: 'Owner',
- note: 'changed the description',
- note_html: '<p dir="auto">changed the description</p>',
- current_user: { can_edit: false },
- discussion_id: 'b97fb7bda470a65b3e009377a9032edec0a4dd05',
- emoji_awardable: false,
- path: '/h5bp/html5-boilerplate/notes/1057',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fh5bp%2Fhtml5-boilerplate%2Fissues%2F10%23note_1057&user_id=1',
- },
- ],
- },
- {
- expanded: true,
- id: 'ffde43f25984ad7f2b4275135e0e2846875336c0',
- individual_note: true,
- notes: [
- {
- id: '1391',
- attachment: null,
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: 'test',
- path: '/root',
- },
- created_at: '2018-02-26T18:13:24.071Z',
- updated_at: '2018-02-26T18:13:24.071Z',
- system: true,
- system_note_icon_name: 'pencil',
- noteable_id: 99,
- noteable_type: 'Issue',
- type: null,
- human_access: 'Owner',
- note: 'changed the description',
- note_html: '<p dir="auto">changed the description</p>',
- current_user: { can_edit: false },
- discussion_id: '3eb958b4d81dec207ec3537a2f3bd8b9f271bb34',
- emoji_awardable: false,
- path: '/h5bp/html5-boilerplate/notes/1057',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fh5bp%2Fhtml5-boilerplate%2Fissues%2F10%23note_1057&user_id=1',
- },
- ],
- },
-];
-
-export const INDIVIDUAL_NOTE_RESPONSE_MAP = {
- GET: {
- '/gitlab-org/gitlab-foss/issues/26/discussions.json': [
- {
- id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
- reply_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
- expanded: true,
- notes: [
- {
- id: '1390',
- attachment: {
- url: null,
- filename: null,
- image: false,
- },
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- path: '/root',
- },
- created_at: '2017-08-01T17:09:33.762Z',
- updated_at: '2017-08-01T17:09:33.762Z',
- system: false,
- noteable_id: 98,
- noteable_type: 'Issue',
- type: null,
- human_access: 'Owner',
- note: 'sdfdsaf',
- note_html: '\u003cp dir="auto"\u003esdfdsaf\u003c/p\u003e',
- current_user: {
- can_edit: true,
- can_award_emoji: true,
- },
- discussion_id: '0fb4e0e3f9276e55ff32eb4195add694aece4edd',
- emoji_awardable: true,
- award_emoji: [
- {
- name: 'baseball',
- user: {
- id: 1,
- name: 'Root',
- username: 'root',
- },
- },
- {
- name: 'art',
- user: {
- id: 1,
- name: 'Root',
- username: 'root',
- },
- },
- ],
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- toggle_award_path: '/gitlab-org/gitlab-foss/notes/1390/toggle_award_emoji',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1390\u0026user_id=1',
- path: '/gitlab-org/gitlab-foss/notes/1390',
- },
- ],
- individual_note: true,
- },
- {
- id: '70d5c92a4039a36c70100c6691c18c27e4b0a790',
- reply_id: '70d5c92a4039a36c70100c6691c18c27e4b0a790',
- expanded: true,
- notes: [
- {
- id: '1391',
- attachment: {
- url: null,
- filename: null,
- image: false,
- },
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- path: '/root',
- },
- created_at: '2017-08-02T10:51:38.685Z',
- updated_at: '2017-08-02T10:51:38.685Z',
- system: false,
- noteable_id: 98,
- noteable_type: 'Issue',
- type: null,
- human_access: 'Owner',
- note: 'New note!',
- note_html: '\u003cp dir="auto"\u003eNew note!\u003c/p\u003e',
- current_user: {
- can_edit: true,
- can_award_emoji: true,
- },
- discussion_id: '70d5c92a4039a36c70100c6691c18c27e4b0a790',
- emoji_awardable: true,
- award_emoji: [],
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- toggle_award_path: '/gitlab-org/gitlab-foss/notes/1391/toggle_award_emoji',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1391\u0026user_id=1',
- path: '/gitlab-org/gitlab-foss/notes/1391',
- },
- ],
- individual_note: true,
- },
- ],
- '/gitlab-org/gitlab-foss/noteable/issue/98/notes': {
- last_fetched_at: 1512900838,
- notes: [],
- },
- },
- PUT: {
- '/gitlab-org/gitlab-foss/notes/1471': {
- commands_changes: null,
- valid: true,
- id: '1471',
- attachment: null,
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- path: '/root',
- },
- created_at: '2017-08-08T16:53:00.666Z',
- updated_at: '2017-12-10T11:03:21.876Z',
- system: false,
- noteable_id: 124,
- noteable_type: 'Issue',
- noteable_iid: 29,
- type: 'DiscussionNote',
- human_access: 'Owner',
- note: 'Adding a comment',
- note_html: '\u003cp dir="auto"\u003eAdding a comment\u003c/p\u003e',
- last_edited_at: '2017-12-10T11:03:21.876Z',
- last_edited_by: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- path: '/root',
- },
- current_user: {
- can_edit: true,
- can_award_emoji: true,
- },
- discussion_id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052',
- emoji_awardable: true,
- award_emoji: [],
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- toggle_award_path: '/gitlab-org/gitlab-foss/notes/1471/toggle_award_emoji',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F29%23note_1471\u0026user_id=1',
- path: '/gitlab-org/gitlab-foss/notes/1471',
- },
- },
-};
-
-export const DISCUSSION_NOTE_RESPONSE_MAP = {
- ...INDIVIDUAL_NOTE_RESPONSE_MAP,
- GET: {
- ...INDIVIDUAL_NOTE_RESPONSE_MAP.GET,
- '/gitlab-org/gitlab-foss/issues/26/discussions.json': [
- {
- id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052',
- reply_id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052',
- expanded: true,
- notes: [
- {
- id: '1471',
- attachment: {
- url: null,
- filename: null,
- image: false,
- },
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatar_url: null,
- path: '/root',
- },
- created_at: '2017-08-08T16:53:00.666Z',
- updated_at: '2017-08-08T16:53:00.666Z',
- system: false,
- noteable_id: 124,
- noteable_type: 'Issue',
- noteable_iid: 29,
- type: 'DiscussionNote',
- human_access: 'Owner',
- note: 'Adding a comment',
- note_html: '\u003cp dir="auto"\u003eAdding a comment\u003c/p\u003e',
- current_user: {
- can_edit: true,
- can_award_emoji: true,
- },
- discussion_id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052',
- emoji_awardable: true,
- award_emoji: [],
- toggle_award_path: '/gitlab-org/gitlab-foss/notes/1471/toggle_award_emoji',
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F29%23note_1471\u0026user_id=1',
- path: '/gitlab-org/gitlab-foss/notes/1471',
- },
- ],
- individual_note: false,
- },
- ],
- },
-};
-
-export function getIndividualNoteResponse(config) {
- return [200, INDIVIDUAL_NOTE_RESPONSE_MAP[config.method.toUpperCase()][config.url]];
-}
-
-export function getDiscussionNoteResponse(config) {
- return [200, DISCUSSION_NOTE_RESPONSE_MAP[config.method.toUpperCase()][config.url]];
-}
-
-export const notesWithDescriptionChanges = [
- {
- id: '39b271c2033e9ed43d8edb393702f65f7a830459',
- reply_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
- expanded: true,
- notes: [
- {
- id: '901',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:05:36.117Z',
- updated_at: '2018-05-29T12:05:36.117Z',
- system: false,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note:
- 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
- note_html:
- '<p dir="auto">Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.</p>',
- current_user: { can_edit: true, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- discussion_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
- emoji_awardable: true,
- award_emoji: [],
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_901&user_id=1',
- human_access: 'Owner',
- toggle_award_path: '/gitlab-org/gitlab-shell/notes/901/toggle_award_emoji',
- path: '/gitlab-org/gitlab-shell/notes/901',
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
- {
- id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
- reply_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
- expanded: true,
- notes: [
- {
- id: '902',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:05:58.694Z',
- updated_at: '2018-05-29T12:05:58.694Z',
- system: false,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note:
- 'Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.',
- note_html:
- '<p dir="auto">Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.</p>',
- current_user: { can_edit: true, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- discussion_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
- emoji_awardable: true,
- award_emoji: [],
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_902&user_id=1',
- human_access: 'Owner',
- toggle_award_path: '/gitlab-org/gitlab-shell/notes/902/toggle_award_emoji',
- path: '/gitlab-org/gitlab-shell/notes/902',
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
- {
- id: '7f1feda384083eb31763366e6392399fde6f3f31',
- reply_id: '7f1feda384083eb31763366e6392399fde6f3f31',
- expanded: true,
- notes: [
- {
- id: '903',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:06:05.772Z',
- updated_at: '2018-05-29T12:06:05.772Z',
- system: true,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note: 'changed the description',
- note_html: '<p dir="auto">changed the description</p>',
- current_user: { can_edit: false, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- system_note_icon_name: 'pencil-square',
- discussion_id: '7f1feda384083eb31763366e6392399fde6f3f31',
- emoji_awardable: false,
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_903&user_id=1',
- human_access: 'Owner',
- path: '/gitlab-org/gitlab-shell/notes/903',
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
- {
- id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
- reply_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
- expanded: true,
- notes: [
- {
- id: '904',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:06:16.112Z',
- updated_at: '2018-05-29T12:06:16.112Z',
- system: false,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note: 'Ullamcorper eget nulla facilisi etiam',
- note_html: '<p dir="auto">Ullamcorper eget nulla facilisi etiam</p>',
- current_user: { can_edit: true, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- discussion_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
- emoji_awardable: true,
- award_emoji: [],
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_904&user_id=1',
- human_access: 'Owner',
- toggle_award_path: '/gitlab-org/gitlab-shell/notes/904/toggle_award_emoji',
- path: '/gitlab-org/gitlab-shell/notes/904',
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
- {
- id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
- reply_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
- expanded: true,
- notes: [
- {
- id: '905',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:06:28.851Z',
- updated_at: '2018-05-29T12:06:28.851Z',
- system: true,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note: 'changed the description',
- note_html: '<p dir="auto">changed the description</p>',
- current_user: { can_edit: false, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- system_note_icon_name: 'pencil-square',
- discussion_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
- emoji_awardable: false,
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_905&user_id=1',
- human_access: 'Owner',
- path: '/gitlab-org/gitlab-shell/notes/905',
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
- {
- id: '70411b08cdfc01f24187a06d77daa33464cb2620',
- reply_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
- expanded: true,
- notes: [
- {
- id: '906',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:20:02.925Z',
- updated_at: '2018-05-29T12:20:02.925Z',
- system: true,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note: 'changed the description',
- note_html: '<p dir="auto">changed the description</p>',
- current_user: { can_edit: false, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- system_note_icon_name: 'pencil-square',
- discussion_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
- emoji_awardable: false,
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_906&user_id=1',
- human_access: 'Owner',
- path: '/gitlab-org/gitlab-shell/notes/906',
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
-];
-
-export const collapsedSystemNotes = [
- {
- id: '39b271c2033e9ed43d8edb393702f65f7a830459',
- reply_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
- expanded: true,
- notes: [
- {
- id: '901',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:05:36.117Z',
- updated_at: '2018-05-29T12:05:36.117Z',
- system: false,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note:
- 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
- note_html:
- '<p dir="auto">Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.</p>',
- current_user: { can_edit: true, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- discussion_id: '39b271c2033e9ed43d8edb393702f65f7a830459',
- emoji_awardable: true,
- award_emoji: [],
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_901&user_id=1',
- human_access: 'Owner',
- toggle_award_path: '/gitlab-org/gitlab-shell/notes/901/toggle_award_emoji',
- path: '/gitlab-org/gitlab-shell/notes/901',
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
- {
- id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
- reply_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
- expanded: true,
- notes: [
- {
- id: '902',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:05:58.694Z',
- updated_at: '2018-05-29T12:05:58.694Z',
- system: false,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note:
- 'Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.',
- note_html:
- '<p dir="auto">Varius vel pharetra vel turpis nunc eget lorem. Ipsum dolor sit amet consectetur adipiscing.</p>',
- current_user: { can_edit: true, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- discussion_id: '4852335d7dc40b9ceb8fde1a2bb9c1b67e4c7795',
- emoji_awardable: true,
- award_emoji: [],
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_902&user_id=1',
- human_access: 'Owner',
- toggle_award_path: '/gitlab-org/gitlab-shell/notes/902/toggle_award_emoji',
- path: '/gitlab-org/gitlab-shell/notes/902',
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
- {
- id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
- reply_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
- expanded: true,
- notes: [
- {
- id: '904',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:06:16.112Z',
- updated_at: '2018-05-29T12:06:16.112Z',
- system: false,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note: 'Ullamcorper eget nulla facilisi etiam',
- note_html: '<p dir="auto">Ullamcorper eget nulla facilisi etiam</p>',
- current_user: { can_edit: true, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- discussion_id: '091865fe3ae20f0045234a3d103e3b15e73405b5',
- emoji_awardable: true,
- award_emoji: [],
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_904&user_id=1',
- human_access: 'Owner',
- toggle_award_path: '/gitlab-org/gitlab-shell/notes/904/toggle_award_emoji',
- path: '/gitlab-org/gitlab-shell/notes/904',
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
- {
- id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
- reply_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
- expanded: true,
- notes: [
- {
- id: '905',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:06:28.851Z',
- updated_at: '2018-05-29T12:06:28.851Z',
- system: true,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note: 'changed the description',
- note_html: ' <p dir="auto">changed the description 2 times within 1 minute </p>',
- current_user: { can_edit: false, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- system_note_icon_name: 'pencil-square',
- discussion_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
- emoji_awardable: false,
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_905&user_id=1',
- human_access: 'Owner',
- path: '/gitlab-org/gitlab-shell/notes/905',
- times_updated: 2,
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
- {
- id: '70411b08cdfc01f24187a06d77daa33464cb2620',
- reply_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
- expanded: true,
- notes: [
- {
- id: '906',
- type: null,
- attachment: null,
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- path: '/root',
- },
- created_at: '2018-05-29T12:20:02.925Z',
- updated_at: '2018-05-29T12:20:02.925Z',
- system: true,
- noteable_id: 182,
- noteable_type: 'Issue',
- resolvable: false,
- noteable_iid: 12,
- note: 'changed the description',
- note_html: '<p dir="auto">changed the description</p>',
- current_user: { can_edit: false, can_award_emoji: true },
- resolved: false,
- resolved_by: null,
- system_note_icon_name: 'pencil-square',
- discussion_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
- emoji_awardable: false,
- report_abuse_path:
- '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-shell%2Fissues%2F12%23note_906&user_id=1',
- human_access: 'Owner',
- path: '/gitlab-org/gitlab-shell/notes/906',
- },
- ],
- individual_note: true,
- resolvable: false,
- resolved: false,
- diff_discussion: false,
- },
-];
-
-export const discussion1 = {
- id: 'abc1',
- resolvable: true,
- resolved: false,
- active: true,
- diff_file: {
- file_path: 'about.md',
- },
- position: {
- new_line: 50,
- old_line: null,
- },
- notes: [
- {
- created_at: '2018-07-04T16:25:41.749Z',
- },
- ],
-};
-
-export const resolvedDiscussion1 = {
- id: 'abc1',
- resolvable: true,
- resolved: true,
- diff_file: {
- file_path: 'about.md',
- },
- position: {
- new_line: 50,
- old_line: null,
- },
- notes: [
- {
- created_at: '2018-07-04T16:25:41.749Z',
- },
- ],
-};
-
-export const discussion2 = {
- id: 'abc2',
- resolvable: true,
- resolved: false,
- active: true,
- diff_file: {
- file_path: 'README.md',
- },
- position: {
- new_line: null,
- old_line: 20,
- },
- notes: [
- {
- created_at: '2018-07-04T12:05:41.749Z',
- },
- ],
-};
-
-export const discussion3 = {
- id: 'abc3',
- resolvable: true,
- active: true,
- resolved: false,
- diff_file: {
- file_path: 'README.md',
- },
- position: {
- new_line: 21,
- old_line: null,
- },
- notes: [
- {
- created_at: '2018-07-05T17:25:41.749Z',
- },
- ],
-};
-
-export const unresolvableDiscussion = {
- resolvable: false,
-};
-
-export const discussionFiltersMock = [
- {
- title: 'Show all activity',
- value: 0,
- },
- {
- title: 'Show comments only',
- value: 1,
- },
- {
- title: 'Show system notes only',
- value: 2,
- },
-];
+export * from '../../frontend/notes/mock_data.js';
diff --git a/spec/javascripts/notes/stores/collapse_utils_spec.js b/spec/javascripts/notes/stores/collapse_utils_spec.js
index 8ede9319088..d3019f4b9a4 100644
--- a/spec/javascripts/notes/stores/collapse_utils_spec.js
+++ b/spec/javascripts/notes/stores/collapse_utils_spec.js
@@ -1,6 +1,5 @@
import {
isDescriptionSystemNote,
- changeDescriptionNote,
getTimeDifferenceMinutes,
collapseSystemNotes,
} from '~/notes/stores/collapse_utils';
@@ -24,15 +23,6 @@ describe('Collapse utils', () => {
);
});
- it('changes the description to contain the number of changed times', () => {
- const changedNote = changeDescriptionNote(mockSystemNote, 3, 5);
-
- expect(changedNote.times_updated).toEqual(3);
- expect(changedNote.note_html.trim()).toContain(
- '<p dir="auto">changed the description 3 times within 5 minutes </p>',
- );
- });
-
it('gets the time difference between two notes', () => {
const anotherSystemNote = {
created_at: '2018-05-14T21:33:00.000Z',
diff --git a/spec/javascripts/pipelines/graph/action_component_spec.js b/spec/javascripts/pipelines/graph/action_component_spec.js
deleted file mode 100644
index 321497b35b5..00000000000
--- a/spec/javascripts/pipelines/graph/action_component_spec.js
+++ /dev/null
@@ -1,81 +0,0 @@
-import Vue from 'vue';
-import MockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
-import actionComponent from '~/pipelines/components/graph/action_component.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
-
-describe('pipeline graph action component', () => {
- let component;
- let mock;
-
- beforeEach(done => {
- const ActionComponent = Vue.extend(actionComponent);
- mock = new MockAdapter(axios);
-
- mock.onPost('foo.json').reply(200);
-
- component = mountComponent(ActionComponent, {
- tooltipText: 'bar',
- link: 'foo',
- actionIcon: 'cancel',
- });
-
- Vue.nextTick(done);
- });
-
- afterEach(() => {
- mock.restore();
- component.$destroy();
- });
-
- it('should render the provided title as a bootstrap tooltip', () => {
- expect(component.$el.getAttribute('data-original-title')).toEqual('bar');
- });
-
- it('should update bootstrap tooltip when title changes', done => {
- component.tooltipText = 'changed';
-
- component
- .$nextTick()
- .then(() => {
- expect(component.$el.getAttribute('data-original-title')).toBe('changed');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('should render an svg', () => {
- expect(component.$el.querySelector('.ci-action-icon-wrapper')).toBeDefined();
- expect(component.$el.querySelector('svg')).toBeDefined();
- });
-
- describe('on click', () => {
- it('emits `pipelineActionRequestComplete` after a successful request', done => {
- spyOn(component, '$emit');
-
- component.$el.click();
-
- setTimeout(() => {
- component
- .$nextTick()
- .then(() => {
- expect(component.$emit).toHaveBeenCalledWith('pipelineActionRequestComplete');
- })
- .catch(done.fail);
-
- done();
- }, 0);
- });
-
- it('renders a loading icon while waiting for request', done => {
- component.$el.click();
-
- component.$nextTick(() => {
- expect(component.$el.querySelector('.js-action-icon-loading')).not.toBeNull();
- setTimeout(() => {
- done();
- });
- });
- });
- });
-});
diff --git a/spec/javascripts/raven/raven_config_spec.js b/spec/javascripts/raven/raven_config_spec.js
deleted file mode 100644
index af634a0c196..00000000000
--- a/spec/javascripts/raven/raven_config_spec.js
+++ /dev/null
@@ -1,254 +0,0 @@
-import Raven from 'raven-js';
-import RavenConfig from '~/raven/raven_config';
-
-describe('RavenConfig', () => {
- describe('IGNORE_ERRORS', () => {
- it('should be an array of strings', () => {
- const areStrings = RavenConfig.IGNORE_ERRORS.every(error => typeof error === 'string');
-
- expect(areStrings).toBe(true);
- });
- });
-
- describe('IGNORE_URLS', () => {
- it('should be an array of regexps', () => {
- const areRegExps = RavenConfig.IGNORE_URLS.every(url => url instanceof RegExp);
-
- expect(areRegExps).toBe(true);
- });
- });
-
- describe('SAMPLE_RATE', () => {
- it('should be a finite number', () => {
- expect(typeof RavenConfig.SAMPLE_RATE).toEqual('number');
- });
- });
-
- describe('init', () => {
- const options = {
- currentUserId: 1,
- };
-
- beforeEach(() => {
- spyOn(RavenConfig, 'configure');
- spyOn(RavenConfig, 'bindRavenErrors');
- spyOn(RavenConfig, 'setUser');
-
- RavenConfig.init(options);
- });
-
- it('should set the options property', () => {
- expect(RavenConfig.options).toEqual(options);
- });
-
- it('should call the configure method', () => {
- expect(RavenConfig.configure).toHaveBeenCalled();
- });
-
- it('should call the error bindings method', () => {
- expect(RavenConfig.bindRavenErrors).toHaveBeenCalled();
- });
-
- it('should call setUser', () => {
- expect(RavenConfig.setUser).toHaveBeenCalled();
- });
-
- it('should not call setUser if there is no current user ID', () => {
- RavenConfig.setUser.calls.reset();
-
- options.currentUserId = undefined;
-
- RavenConfig.init(options);
-
- expect(RavenConfig.setUser).not.toHaveBeenCalled();
- });
- });
-
- describe('configure', () => {
- let raven;
- let ravenConfig;
- const options = {
- sentryDsn: '//sentryDsn',
- whitelistUrls: ['//gitlabUrl', 'webpack-internal://'],
- environment: 'test',
- release: 'revision',
- tags: {
- revision: 'revision',
- },
- };
-
- beforeEach(() => {
- ravenConfig = jasmine.createSpyObj('ravenConfig', ['shouldSendSample']);
- raven = jasmine.createSpyObj('raven', ['install']);
-
- spyOn(Raven, 'config').and.returnValue(raven);
-
- ravenConfig.options = options;
- ravenConfig.IGNORE_ERRORS = 'ignore_errors';
- ravenConfig.IGNORE_URLS = 'ignore_urls';
-
- RavenConfig.configure.call(ravenConfig);
- });
-
- it('should call Raven.config', () => {
- expect(Raven.config).toHaveBeenCalledWith(options.sentryDsn, {
- release: options.release,
- tags: options.tags,
- whitelistUrls: options.whitelistUrls,
- environment: 'test',
- ignoreErrors: ravenConfig.IGNORE_ERRORS,
- ignoreUrls: ravenConfig.IGNORE_URLS,
- shouldSendCallback: jasmine.any(Function),
- });
- });
-
- it('should call Raven.install', () => {
- expect(raven.install).toHaveBeenCalled();
- });
-
- it('should set environment from options', () => {
- ravenConfig.options.environment = 'development';
-
- RavenConfig.configure.call(ravenConfig);
-
- expect(Raven.config).toHaveBeenCalledWith(options.sentryDsn, {
- release: options.release,
- tags: options.tags,
- whitelistUrls: options.whitelistUrls,
- environment: 'development',
- ignoreErrors: ravenConfig.IGNORE_ERRORS,
- ignoreUrls: ravenConfig.IGNORE_URLS,
- shouldSendCallback: jasmine.any(Function),
- });
- });
- });
-
- describe('setUser', () => {
- let ravenConfig;
-
- beforeEach(() => {
- ravenConfig = { options: { currentUserId: 1 } };
- spyOn(Raven, 'setUserContext');
-
- RavenConfig.setUser.call(ravenConfig);
- });
-
- it('should call .setUserContext', function() {
- expect(Raven.setUserContext).toHaveBeenCalledWith({
- id: ravenConfig.options.currentUserId,
- });
- });
- });
-
- describe('handleRavenErrors', () => {
- let event;
- let req;
- let config;
- let err;
-
- beforeEach(() => {
- event = {};
- req = { status: 'status', responseText: 'responseText', statusText: 'statusText' };
- config = { type: 'type', url: 'url', data: 'data' };
- err = {};
-
- spyOn(Raven, 'captureMessage');
-
- RavenConfig.handleRavenErrors(event, req, config, err);
- });
-
- it('should call Raven.captureMessage', () => {
- expect(Raven.captureMessage).toHaveBeenCalledWith(err, {
- extra: {
- type: config.type,
- url: config.url,
- data: config.data,
- status: req.status,
- response: req.responseText,
- error: err,
- event,
- },
- });
- });
-
- describe('if no err is provided', () => {
- beforeEach(() => {
- Raven.captureMessage.calls.reset();
-
- RavenConfig.handleRavenErrors(event, req, config);
- });
-
- it('should use req.statusText as the error value', () => {
- expect(Raven.captureMessage).toHaveBeenCalledWith(req.statusText, {
- extra: {
- type: config.type,
- url: config.url,
- data: config.data,
- status: req.status,
- response: req.responseText,
- error: req.statusText,
- event,
- },
- });
- });
- });
-
- describe('if no req.responseText is provided', () => {
- beforeEach(() => {
- req.responseText = undefined;
-
- Raven.captureMessage.calls.reset();
-
- RavenConfig.handleRavenErrors(event, req, config, err);
- });
-
- it('should use `Unknown response text` as the response', () => {
- expect(Raven.captureMessage).toHaveBeenCalledWith(err, {
- extra: {
- type: config.type,
- url: config.url,
- data: config.data,
- status: req.status,
- response: 'Unknown response text',
- error: err,
- event,
- },
- });
- });
- });
- });
-
- describe('shouldSendSample', () => {
- let randomNumber;
-
- beforeEach(() => {
- RavenConfig.SAMPLE_RATE = 50;
-
- spyOn(Math, 'random').and.callFake(() => randomNumber);
- });
-
- it('should call Math.random', () => {
- RavenConfig.shouldSendSample();
-
- expect(Math.random).toHaveBeenCalled();
- });
-
- it('should return true if the sample rate is greater than the random number * 100', () => {
- randomNumber = 0.1;
-
- expect(RavenConfig.shouldSendSample()).toBe(true);
- });
-
- it('should return false if the sample rate is less than the random number * 100', () => {
- randomNumber = 0.9;
-
- expect(RavenConfig.shouldSendSample()).toBe(false);
- });
-
- it('should return true if the sample rate is equal to the random number * 100', () => {
- randomNumber = 0.5;
-
- expect(RavenConfig.shouldSendSample()).toBe(true);
- });
- });
-});
diff --git a/spec/javascripts/search_autocomplete_spec.js b/spec/javascripts/search_autocomplete_spec.js
index 9702cb56d99..1798f9962e2 100644
--- a/spec/javascripts/search_autocomplete_spec.js
+++ b/spec/javascripts/search_autocomplete_spec.js
@@ -1,4 +1,4 @@
-/* eslint-disable no-var, one-var, no-unused-expressions, consistent-return, no-param-reassign, default-case, no-return-assign, vars-on-top */
+/* eslint-disable no-unused-expressions, consistent-return, no-param-reassign, default-case, no-return-assign */
import $ from 'jquery';
import '~/gl_dropdown';
@@ -6,41 +6,27 @@ import initSearchAutocomplete from '~/search_autocomplete';
import '~/lib/utils/common_utils';
describe('Search autocomplete dropdown', () => {
- var assertLinks,
- dashboardIssuesPath,
- dashboardMRsPath,
- groupIssuesPath,
- groupMRsPath,
- groupName,
- mockDashboardOptions,
- mockGroupOptions,
- mockProjectOptions,
- projectIssuesPath,
- projectMRsPath,
- projectName,
- userId,
- widget;
- var userName = 'root';
+ let widget = null;
- widget = null;
+ const userName = 'root';
- userId = 1;
+ const userId = 1;
- dashboardIssuesPath = '/dashboard/issues';
+ const dashboardIssuesPath = '/dashboard/issues';
- dashboardMRsPath = '/dashboard/merge_requests';
+ const dashboardMRsPath = '/dashboard/merge_requests';
- projectIssuesPath = '/gitlab-org/gitlab-foss/issues';
+ const projectIssuesPath = '/gitlab-org/gitlab-foss/issues';
- projectMRsPath = '/gitlab-org/gitlab-foss/merge_requests';
+ const projectMRsPath = '/gitlab-org/gitlab-foss/merge_requests';
- groupIssuesPath = '/groups/gitlab-org/issues';
+ const groupIssuesPath = '/groups/gitlab-org/issues';
- groupMRsPath = '/groups/gitlab-org/merge_requests';
+ const groupMRsPath = '/groups/gitlab-org/merge_requests';
- projectName = 'GitLab Community Edition';
+ const projectName = 'GitLab Community Edition';
- groupName = 'Gitlab Org';
+ const groupName = 'Gitlab Org';
const removeBodyAttributes = function() {
const $body = $('body');
@@ -76,7 +62,7 @@ describe('Search autocomplete dropdown', () => {
};
// Mock `gl` object in window for dashboard specific page. App code will need it.
- mockDashboardOptions = function() {
+ const mockDashboardOptions = function() {
window.gl || (window.gl = {});
return (window.gl.dashboardOptions = {
issuesPath: dashboardIssuesPath,
@@ -85,7 +71,7 @@ describe('Search autocomplete dropdown', () => {
};
// Mock `gl` object in window for project specific page. App code will need it.
- mockProjectOptions = function() {
+ const mockProjectOptions = function() {
window.gl || (window.gl = {});
return (window.gl.projectOptions = {
'gitlab-ce': {
@@ -96,7 +82,7 @@ describe('Search autocomplete dropdown', () => {
});
};
- mockGroupOptions = function() {
+ const mockGroupOptions = function() {
window.gl || (window.gl = {});
return (window.gl.groupOptions = {
'gitlab-org': {
@@ -107,7 +93,7 @@ describe('Search autocomplete dropdown', () => {
});
};
- assertLinks = function(list, issuesPath, mrsPath) {
+ const assertLinks = function(list, issuesPath, mrsPath) {
if (issuesPath) {
const issuesAssignedToMeLink = `a[href="${issuesPath}/?assignee_username=${userName}"]`;
const issuesIHaveCreatedLink = `a[href="${issuesPath}/?author_username=${userName}"]`;
@@ -144,29 +130,26 @@ describe('Search autocomplete dropdown', () => {
});
it('should show Dashboard specific dropdown menu', function() {
- var list;
addBodyAttributes();
mockDashboardOptions();
widget.searchInput.triggerHandler('focus');
- list = widget.wrap.find('.dropdown-menu').find('ul');
+ const list = widget.wrap.find('.dropdown-menu').find('ul');
return assertLinks(list, dashboardIssuesPath, dashboardMRsPath);
});
it('should show Group specific dropdown menu', function() {
- var list;
addBodyAttributes('group');
mockGroupOptions();
widget.searchInput.triggerHandler('focus');
- list = widget.wrap.find('.dropdown-menu').find('ul');
+ const list = widget.wrap.find('.dropdown-menu').find('ul');
return assertLinks(list, groupIssuesPath, groupMRsPath);
});
it('should show Project specific dropdown menu', function() {
- var list;
addBodyAttributes('project');
mockProjectOptions();
widget.searchInput.triggerHandler('focus');
- list = widget.wrap.find('.dropdown-menu').find('ul');
+ const list = widget.wrap.find('.dropdown-menu').find('ul');
return assertLinks(list, projectIssuesPath, projectMRsPath);
});
@@ -180,26 +163,25 @@ describe('Search autocomplete dropdown', () => {
});
it('should not show category related menu if there is text in the input', function() {
- var link, list;
addBodyAttributes('project');
mockProjectOptions();
widget.searchInput.val('help');
widget.searchInput.triggerHandler('focus');
- list = widget.wrap.find('.dropdown-menu').find('ul');
- link = `a[href='${projectIssuesPath}/?assignee_username=${userName}']`;
+ const list = widget.wrap.find('.dropdown-menu').find('ul');
+ const link = `a[href='${projectIssuesPath}/?assignee_username=${userName}']`;
expect(list.find(link).length).toBe(0);
});
it('should not submit the search form when selecting an autocomplete row with the keyboard', function() {
- var ENTER = 13;
- var DOWN = 40;
+ const ENTER = 13;
+ const DOWN = 40;
addBodyAttributes();
mockDashboardOptions(true);
- var submitSpy = spyOnEvent('form', 'submit');
+ const submitSpy = spyOnEvent('form', 'submit');
widget.searchInput.triggerHandler('focus');
widget.wrap.trigger($.Event('keydown', { which: DOWN }));
- var enterKeyEvent = $.Event('keydown', { which: ENTER });
+ const enterKeyEvent = $.Event('keydown', { which: ENTER });
widget.searchInput.trigger(enterKeyEvent);
// This does not currently catch failing behavior. For security reasons,
// browsers will not trigger default behavior (form submit, in this
diff --git a/spec/javascripts/sidebar/subscriptions_spec.js b/spec/javascripts/sidebar/subscriptions_spec.js
index a97608d6b8a..1256852c472 100644
--- a/spec/javascripts/sidebar/subscriptions_spec.js
+++ b/spec/javascripts/sidebar/subscriptions_spec.js
@@ -76,4 +76,25 @@ describe('Subscriptions', function() {
expect(vm.$emit).toHaveBeenCalledWith('toggleSidebar');
});
+
+ describe('given project emails are disabled', () => {
+ const subscribeDisabledDescription = 'Notifications have been disabled';
+
+ beforeEach(() => {
+ vm = mountComponent(Subscriptions, {
+ subscribed: false,
+ projectEmailsDisabled: true,
+ subscribeDisabledDescription,
+ });
+ });
+
+ it('sets the correct display text', () => {
+ expect(vm.$el.textContent).toContain(subscribeDisabledDescription);
+ expect(vm.$refs.tooltip.dataset.originalTitle).toBe(subscribeDisabledDescription);
+ });
+
+ it('does not render the toggle button', () => {
+ expect(vm.$refs.toggleButton).toBeUndefined();
+ });
+ });
});
diff --git a/spec/javascripts/signin_tabs_memoizer_spec.js b/spec/javascripts/signin_tabs_memoizer_spec.js
index ef5c774736b..966ae55ce14 100644
--- a/spec/javascripts/signin_tabs_memoizer_spec.js
+++ b/spec/javascripts/signin_tabs_memoizer_spec.js
@@ -1,5 +1,7 @@
import AccessorUtilities from '~/lib/utils/accessor';
import SigninTabsMemoizer from '~/pages/sessions/new/signin_tabs_memoizer';
+import trackData from '~/pages/sessions/new/index';
+import Tracking from '~/tracking';
describe('SigninTabsMemoizer', () => {
const fixtureTemplate = 'static/signin_tabs.html';
@@ -93,6 +95,50 @@ describe('SigninTabsMemoizer', () => {
});
});
+ describe('trackData', () => {
+ beforeEach(() => {
+ spyOn(Tracking, 'event');
+ });
+
+ describe('with tracking data', () => {
+ beforeEach(() => {
+ gon.tracking_data = {
+ category: 'Growth::Acquisition::Experiment::SignUpFlow',
+ action: 'start',
+ label: 'uuid',
+ property: 'control_group',
+ };
+ trackData();
+ });
+
+ it('should track data when the "click" event of the register tab is triggered', () => {
+ document.querySelector('a[href="#register-pane"]').click();
+
+ expect(Tracking.event).toHaveBeenCalledWith(
+ 'Growth::Acquisition::Experiment::SignUpFlow',
+ 'start',
+ {
+ label: 'uuid',
+ property: 'control_group',
+ },
+ );
+ });
+ });
+
+ describe('without tracking data', () => {
+ beforeEach(() => {
+ gon.tracking_data = undefined;
+ trackData();
+ });
+
+ it('should not track data when the "click" event of the register tab is triggered', () => {
+ document.querySelector('a[href="#register-pane"]').click();
+
+ expect(Tracking.event).not.toHaveBeenCalled();
+ });
+ });
+ });
+
describe('saveData', () => {
beforeEach(() => {
memo = {
diff --git a/spec/javascripts/syntax_highlight_spec.js b/spec/javascripts/syntax_highlight_spec.js
index 5438368ccbe..99c47fa31d4 100644
--- a/spec/javascripts/syntax_highlight_spec.js
+++ b/spec/javascripts/syntax_highlight_spec.js
@@ -1,11 +1,10 @@
-/* eslint-disable no-var, no-return-assign */
+/* eslint-disable no-return-assign */
import $ from 'jquery';
import syntaxHighlight from '~/syntax_highlight';
describe('Syntax Highlighter', function() {
- var stubUserColorScheme;
- stubUserColorScheme = function(value) {
+ const stubUserColorScheme = function(value) {
if (window.gon == null) {
window.gon = {};
}
@@ -40,9 +39,8 @@ describe('Syntax Highlighter', function() {
});
it('prevents an infinite loop when no matches exist', function() {
- var highlight;
setFixtures('<div></div>');
- highlight = function() {
+ const highlight = function() {
return syntaxHighlight($('div'));
};
diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js
index cb6b158f01c..859745ee9fc 100644
--- a/spec/javascripts/test_bundle.js
+++ b/spec/javascripts/test_bundle.js
@@ -171,38 +171,7 @@ describe('test errors', () => {
// see: https://github.com/deepsweet/istanbul-instrumenter-loader/issues/15
if (process.env.BABEL_ENV === 'coverage') {
// exempt these files from the coverage report
- const troubleMakers = [
- './blob_edit/blob_bundle.js',
- './boards/components/modal/empty_state.vue',
- './boards/components/modal/footer.js',
- './boards/components/modal/header.js',
- './cycle_analytics/cycle_analytics_bundle.js',
- './cycle_analytics/components/stage_plan_component.js',
- './cycle_analytics/components/stage_staging_component.js',
- './cycle_analytics/components/stage_test_component.js',
- './commit/pipelines/pipelines_bundle.js',
- './diff_notes/diff_notes_bundle.js',
- './diff_notes/components/jump_to_discussion.js',
- './diff_notes/components/resolve_count.js',
- './dispatcher.js',
- './environments/environments_bundle.js',
- './graphs/graphs_bundle.js',
- './issuable/time_tracking/time_tracking_bundle.js',
- './main.js',
- './merge_conflicts/merge_conflicts_bundle.js',
- './merge_conflicts/components/inline_conflict_lines.js',
- './merge_conflicts/components/parallel_conflict_lines.js',
- './monitoring/monitoring_bundle.js',
- './network/network_bundle.js',
- './network/branch_graph.js',
- './profile/profile_bundle.js',
- './protected_branches/protected_branches_bundle.js',
- './snippet/snippet_bundle.js',
- './terminal/terminal_bundle.js',
- './users/users_bundle.js',
- './issue_show/index.js',
- './pages/admin/application_settings/general/index.js',
- ];
+ const troubleMakers = ['./pages/admin/application_settings/general/index.js'];
describe('Uncovered files', function() {
const sourceFilesContexts = [require.context('~', true, /\.(js|vue)$/)];
diff --git a/spec/javascripts/u2f/mock_u2f_device.js b/spec/javascripts/u2f/mock_u2f_device.js
index 26ddd8ade61..ec8425a4e3e 100644
--- a/spec/javascripts/u2f/mock_u2f_device.js
+++ b/spec/javascripts/u2f/mock_u2f_device.js
@@ -1,20 +1,16 @@
-/* eslint-disable no-unused-expressions, no-return-assign, no-param-reassign */
+/* eslint-disable no-unused-expressions */
export default class MockU2FDevice {
constructor() {
this.respondToAuthenticateRequest = this.respondToAuthenticateRequest.bind(this);
this.respondToRegisterRequest = this.respondToRegisterRequest.bind(this);
window.u2f || (window.u2f = {});
- window.u2f.register = (function(_this) {
- return function(appId, registerRequests, signRequests, callback) {
- return (_this.registerCallback = callback);
- };
- })(this);
- window.u2f.sign = (function(_this) {
- return function(appId, challenges, signRequests, callback) {
- return (_this.authenticateCallback = callback);
- };
- })(this);
+ window.u2f.register = (appId, registerRequests, signRequests, callback) => {
+ this.registerCallback = callback;
+ };
+ window.u2f.sign = (appId, challenges, signRequests, callback) => {
+ this.authenticateCallback = callback;
+ };
}
respondToRegisterRequest(params) {
diff --git a/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js b/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js
index bdf802052b9..16997e9dc67 100644
--- a/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js
+++ b/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js
@@ -70,4 +70,30 @@ describe('ContentViewer', () => {
done();
});
});
+
+ it('markdown preview receives the file path as a parameter', done => {
+ mock = new MockAdapter(axios);
+ spyOn(axios, 'post').and.callThrough();
+ mock.onPost(`${gon.relative_url_root}/testproject/preview_markdown`).reply(200, {
+ body: '<b>testing</b>',
+ });
+
+ createComponent({
+ path: 'test.md',
+ content: '* Test',
+ projectPath: 'testproject',
+ type: 'markdown',
+ filePath: 'foo/test.md',
+ });
+
+ setTimeout(() => {
+ expect(axios.post).toHaveBeenCalledWith(
+ `${gon.relative_url_root}/testproject/preview_markdown`,
+ { path: 'foo/test.md', text: '* Test' },
+ jasmine.any(Object),
+ );
+
+ done();
+ });
+ });
});
diff --git a/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js b/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js
index 660eaddf01f..1acd6b3ebe7 100644
--- a/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js
+++ b/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js
@@ -1,13 +1,23 @@
import Vue from 'vue';
+
import diffViewer from '~/vue_shared/components/diff_viewer/diff_viewer.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants';
describe('DiffViewer', () => {
+ const requiredProps = {
+ diffMode: 'replaced',
+ diffViewerMode: 'image',
+ newPath: GREEN_BOX_IMAGE_URL,
+ newSha: 'ABC',
+ oldPath: RED_BOX_IMAGE_URL,
+ oldSha: 'DEF',
+ };
let vm;
function createComponent(props) {
const DiffViewer = Vue.extend(diffViewer);
+
vm = mountComponent(DiffViewer, props);
}
@@ -20,15 +30,11 @@ describe('DiffViewer', () => {
relative_url_root: '',
};
- createComponent({
- diffMode: 'replaced',
- diffViewerMode: 'image',
- newPath: GREEN_BOX_IMAGE_URL,
- newSha: 'ABC',
- oldPath: RED_BOX_IMAGE_URL,
- oldSha: 'DEF',
- projectPath: '',
- });
+ createComponent(
+ Object.assign({}, requiredProps, {
+ projectPath: '',
+ }),
+ );
setTimeout(() => {
expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(
@@ -44,14 +50,13 @@ describe('DiffViewer', () => {
});
it('renders fallback download diff display', done => {
- createComponent({
- diffMode: 'replaced',
- diffViewerMode: 'added',
- newPath: 'test.abc',
- newSha: 'ABC',
- oldPath: 'testold.abc',
- oldSha: 'DEF',
- });
+ createComponent(
+ Object.assign({}, requiredProps, {
+ diffViewerMode: 'added',
+ newPath: 'test.abc',
+ oldPath: 'testold.abc',
+ }),
+ );
setTimeout(() => {
expect(vm.$el.querySelector('.deleted .file-info').textContent.trim()).toContain(
@@ -72,29 +77,28 @@ describe('DiffViewer', () => {
});
it('renders renamed component', () => {
- createComponent({
- diffMode: 'renamed',
- diffViewerMode: 'renamed',
- newPath: 'test.abc',
- newSha: 'ABC',
- oldPath: 'testold.abc',
- oldSha: 'DEF',
- });
+ createComponent(
+ Object.assign({}, requiredProps, {
+ diffMode: 'renamed',
+ diffViewerMode: 'renamed',
+ newPath: 'test.abc',
+ oldPath: 'testold.abc',
+ }),
+ );
expect(vm.$el.textContent).toContain('File moved');
});
it('renders mode changed component', () => {
- createComponent({
- diffMode: 'mode_changed',
- diffViewerMode: 'image',
- newPath: 'test.abc',
- newSha: 'ABC',
- oldPath: 'testold.abc',
- oldSha: 'DEF',
- aMode: '123',
- bMode: '321',
- });
+ createComponent(
+ Object.assign({}, requiredProps, {
+ diffMode: 'mode_changed',
+ newPath: 'test.abc',
+ oldPath: 'testold.abc',
+ aMode: '123',
+ bMode: '321',
+ }),
+ );
expect(vm.$el.textContent).toContain('File mode changed from 123 to 321');
});
diff --git a/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js b/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
index 97c870f27d9..0cb26d5000b 100644
--- a/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
+++ b/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
@@ -4,6 +4,11 @@ import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants';
describe('ImageDiffViewer', () => {
+ const requiredProps = {
+ diffMode: 'replaced',
+ newPath: GREEN_BOX_IMAGE_URL,
+ oldPath: RED_BOX_IMAGE_URL,
+ };
let vm;
function createComponent(props) {
@@ -45,11 +50,7 @@ describe('ImageDiffViewer', () => {
});
it('renders image diff for replaced', done => {
- createComponent({
- diffMode: 'replaced',
- newPath: GREEN_BOX_IMAGE_URL,
- oldPath: RED_BOX_IMAGE_URL,
- });
+ createComponent(requiredProps);
setTimeout(() => {
expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
@@ -70,11 +71,12 @@ describe('ImageDiffViewer', () => {
});
it('renders image diff for new', done => {
- createComponent({
- diffMode: 'new',
- newPath: GREEN_BOX_IMAGE_URL,
- oldPath: '',
- });
+ createComponent(
+ Object.assign({}, requiredProps, {
+ diffMode: 'new',
+ oldPath: '',
+ }),
+ );
setTimeout(() => {
expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
@@ -84,11 +86,12 @@ describe('ImageDiffViewer', () => {
});
it('renders image diff for deleted', done => {
- createComponent({
- diffMode: 'deleted',
- newPath: '',
- oldPath: RED_BOX_IMAGE_URL,
- });
+ createComponent(
+ Object.assign({}, requiredProps, {
+ diffMode: 'deleted',
+ newPath: '',
+ }),
+ );
setTimeout(() => {
expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
@@ -119,11 +122,7 @@ describe('ImageDiffViewer', () => {
describe('swipeMode', () => {
beforeEach(done => {
- createComponent({
- diffMode: 'replaced',
- newPath: GREEN_BOX_IMAGE_URL,
- oldPath: RED_BOX_IMAGE_URL,
- });
+ createComponent(requiredProps);
setTimeout(() => {
done();
@@ -142,11 +141,7 @@ describe('ImageDiffViewer', () => {
describe('onionSkin', () => {
beforeEach(done => {
- createComponent({
- diffMode: 'replaced',
- newPath: GREEN_BOX_IMAGE_URL,
- oldPath: RED_BOX_IMAGE_URL,
- });
+ createComponent(requiredProps);
setTimeout(() => {
done();
diff --git a/spec/javascripts/vue_shared/components/icon_spec.js b/spec/javascripts/vue_shared/components/icon_spec.js
index 7390798afa8..ecaef414464 100644
--- a/spec/javascripts/vue_shared/components/icon_spec.js
+++ b/spec/javascripts/vue_shared/components/icon_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import Icon from '~/vue_shared/components/icon.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
describe('Sprite Icon Component', function() {
describe('Initialization', function() {
@@ -57,4 +58,16 @@ describe('Sprite Icon Component', function() {
expect(Icon.props.name.validator('commit')).toBe(true);
});
});
+
+ it('should call registered listeners when they are triggered', () => {
+ const clickHandler = jasmine.createSpy('clickHandler');
+ const wrapper = mount(Icon, {
+ propsData: { name: 'commit' },
+ listeners: { click: clickHandler },
+ });
+
+ wrapper.find('svg').trigger('click');
+
+ expect(clickHandler).toHaveBeenCalled();
+ });
});
diff --git a/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js b/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js
index 9c2deca585b..323a0f03017 100644
--- a/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js
+++ b/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js
@@ -3,7 +3,7 @@ import _ from 'underscore';
import ProjectSelector from '~/vue_shared/components/project_selector/project_selector.vue';
import ProjectListItem from '~/vue_shared/components/project_selector/project_list_item.vue';
-import { GlSearchBoxByType } from '@gitlab/ui';
+import { GlSearchBoxByType, GlInfiniteScroll } from '@gitlab/ui';
import { mount, createLocalVue } from '@vue/test-utils';
import { trimText } from 'spec/helpers/text_helper';
@@ -91,6 +91,13 @@ describe('ProjectSelector component', () => {
expect(searchInput.attributes('placeholder')).toBe('Search your projects');
});
+ it(`triggers a "bottomReached" event when user has scrolled to the bottom of the list`, () => {
+ spyOn(vm, '$emit');
+ wrapper.find(GlInfiniteScroll).vm.$emit('bottomReached');
+
+ expect(vm.$emit).toHaveBeenCalledWith('bottomReached');
+ });
+
it(`triggers a "projectClicked" event when a project is clicked`, () => {
spyOn(vm, '$emit');
wrapper.find(ProjectListItem).vm.$emit('click', _.first(searchResults));
diff --git a/spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js b/spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js
deleted file mode 100644
index c5045afc5b0..00000000000
--- a/spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js
+++ /dev/null
@@ -1,120 +0,0 @@
-import Vue from 'vue';
-import { placeholderImage } from '~/lazy_loader';
-import userAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
-import mountComponent, { mountComponentWithSlots } from 'spec/helpers/vue_mount_component_helper';
-import defaultAvatarUrl from '~/../images/no_avatar.png';
-
-const DEFAULT_PROPS = {
- size: 99,
- imgSrc: 'myavatarurl.com',
- imgAlt: 'mydisplayname',
- cssClasses: 'myextraavatarclass',
- tooltipText: 'tooltip text',
- tooltipPlacement: 'bottom',
-};
-
-describe('User Avatar Image Component', function() {
- let vm;
- let UserAvatarImage;
-
- beforeEach(() => {
- UserAvatarImage = Vue.extend(userAvatarImage);
- });
-
- describe('Initialization', function() {
- beforeEach(function() {
- vm = mountComponent(UserAvatarImage, {
- ...DEFAULT_PROPS,
- }).$mount();
- });
-
- it('should return a defined Vue component', function() {
- expect(vm).toBeDefined();
- });
-
- it('should have <img> as a child element', function() {
- const imageElement = vm.$el.querySelector('img');
-
- expect(imageElement).not.toBe(null);
- expect(imageElement.getAttribute('src')).toBe(`${DEFAULT_PROPS.imgSrc}?width=99`);
- expect(imageElement.getAttribute('data-src')).toBe(`${DEFAULT_PROPS.imgSrc}?width=99`);
- expect(imageElement.getAttribute('alt')).toBe(DEFAULT_PROPS.imgAlt);
- });
-
- it('should properly compute avatarSizeClass', function() {
- expect(vm.avatarSizeClass).toBe('s99');
- });
-
- it('should properly render img css', function() {
- const { classList } = vm.$el.querySelector('img');
- const containsAvatar = classList.contains('avatar');
- const containsSizeClass = classList.contains('s99');
- const containsCustomClass = classList.contains(DEFAULT_PROPS.cssClasses);
- const lazyClass = classList.contains('lazy');
-
- expect(containsAvatar).toBe(true);
- expect(containsSizeClass).toBe(true);
- expect(containsCustomClass).toBe(true);
- expect(lazyClass).toBe(false);
- });
- });
-
- describe('Initialization when lazy', function() {
- beforeEach(function() {
- vm = mountComponent(UserAvatarImage, {
- ...DEFAULT_PROPS,
- lazy: true,
- }).$mount();
- });
-
- it('should add lazy attributes', function() {
- const imageElement = vm.$el.querySelector('img');
- const lazyClass = imageElement.classList.contains('lazy');
-
- expect(lazyClass).toBe(true);
- expect(imageElement.getAttribute('src')).toBe(placeholderImage);
- expect(imageElement.getAttribute('data-src')).toBe(`${DEFAULT_PROPS.imgSrc}?width=99`);
- });
- });
-
- describe('Initialization without src', function() {
- beforeEach(function() {
- vm = mountComponent(UserAvatarImage);
- });
-
- it('should have default avatar image', function() {
- const imageElement = vm.$el.querySelector('img');
-
- expect(imageElement.getAttribute('src')).toBe(defaultAvatarUrl);
- });
- });
-
- describe('dynamic tooltip content', () => {
- const props = DEFAULT_PROPS;
- const slots = {
- default: ['Action!'],
- };
-
- beforeEach(() => {
- vm = mountComponentWithSlots(UserAvatarImage, { props, slots }).$mount();
- });
-
- it('renders the tooltip slot', () => {
- expect(vm.$el.querySelector('.js-user-avatar-image-toolip')).not.toBe(null);
- });
-
- it('renders the tooltip content', () => {
- expect(vm.$el.querySelector('.js-user-avatar-image-toolip').textContent).toContain(
- slots.default[0],
- );
- });
-
- it('does not render tooltip data attributes for on avatar image', () => {
- const avatarImg = vm.$el.querySelector('img');
-
- expect(avatarImg.dataset.originalTitle).not.toBeDefined();
- expect(avatarImg.dataset.placement).not.toBeDefined();
- expect(avatarImg.dataset.container).not.toBeDefined();
- });
- });
-});
diff --git a/spec/javascripts/vue_shared/components/user_popover/user_popover_spec.js b/spec/javascripts/vue_shared/components/user_popover/user_popover_spec.js
deleted file mode 100644
index c7e0d806d80..00000000000
--- a/spec/javascripts/vue_shared/components/user_popover/user_popover_spec.js
+++ /dev/null
@@ -1,167 +0,0 @@
-import Vue from 'vue';
-import userPopover from '~/vue_shared/components/user_popover/user_popover.vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-
-const DEFAULT_PROPS = {
- loaded: true,
- user: {
- username: 'root',
- name: 'Administrator',
- location: 'Vienna',
- bio: null,
- organization: null,
- status: null,
- },
-};
-
-const UserPopover = Vue.extend(userPopover);
-
-describe('User Popover Component', () => {
- const fixtureTemplate = 'merge_requests/diff_comment.html';
- preloadFixtures(fixtureTemplate);
-
- let vm;
-
- beforeEach(() => {
- loadFixtures(fixtureTemplate);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('Empty', () => {
- beforeEach(() => {
- vm = mountComponent(UserPopover, {
- target: document.querySelector('.js-user-link'),
- user: {
- name: null,
- username: null,
- location: null,
- bio: null,
- organization: null,
- status: null,
- },
- });
- });
-
- it('should return skeleton loaders', () => {
- expect(vm.$el.querySelectorAll('.animation-container').length).toBe(4);
- });
- });
-
- describe('basic data', () => {
- it('should show basic fields', () => {
- vm = mountComponent(UserPopover, {
- ...DEFAULT_PROPS,
- target: document.querySelector('.js-user-link'),
- });
-
- expect(vm.$el.textContent).toContain(DEFAULT_PROPS.user.name);
- expect(vm.$el.textContent).toContain(DEFAULT_PROPS.user.username);
- expect(vm.$el.textContent).toContain(DEFAULT_PROPS.user.location);
- });
-
- it('shows icon for location', () => {
- const iconEl = vm.$el.querySelector('.js-location svg');
-
- expect(iconEl.querySelector('use').getAttribute('xlink:href')).toContain('location');
- });
- });
-
- describe('job data', () => {
- it('should show only bio if no organization is available', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.bio = 'Engineer';
-
- vm = mountComponent(UserPopover, {
- ...testProps,
- target: document.querySelector('.js-user-link'),
- });
-
- expect(vm.$el.textContent).toContain('Engineer');
- });
-
- it('should show only organization if no bio is available', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.organization = 'GitLab';
-
- vm = mountComponent(UserPopover, {
- ...testProps,
- target: document.querySelector('.js-user-link'),
- });
-
- expect(vm.$el.textContent).toContain('GitLab');
- });
-
- it('should display bio and organization in separate lines', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.bio = 'Engineer';
- testProps.user.organization = 'GitLab';
-
- vm = mountComponent(UserPopover, {
- ...DEFAULT_PROPS,
- target: document.querySelector('.js-user-link'),
- });
-
- expect(vm.$el.querySelector('.js-bio').textContent).toContain('Engineer');
- expect(vm.$el.querySelector('.js-organization').textContent).toContain('GitLab');
- });
-
- it('should not encode special characters in bio and organization', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.bio = 'Manager & Team Lead';
- testProps.user.organization = 'Me & my <funky> Company';
-
- vm = mountComponent(UserPopover, {
- ...DEFAULT_PROPS,
- target: document.querySelector('.js-user-link'),
- });
-
- expect(vm.$el.querySelector('.js-bio').textContent).toContain('Manager & Team Lead');
- expect(vm.$el.querySelector('.js-organization').textContent).toContain(
- 'Me & my <funky> Company',
- );
- });
-
- it('shows icon for bio', () => {
- const iconEl = vm.$el.querySelector('.js-bio svg');
-
- expect(iconEl.querySelector('use').getAttribute('xlink:href')).toContain('profile');
- });
-
- it('shows icon for organization', () => {
- const iconEl = vm.$el.querySelector('.js-organization svg');
-
- expect(iconEl.querySelector('use').getAttribute('xlink:href')).toContain('work');
- });
- });
-
- describe('status data', () => {
- it('should show only message', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.status = { message_html: 'Hello World' };
-
- vm = mountComponent(UserPopover, {
- ...DEFAULT_PROPS,
- target: document.querySelector('.js-user-link'),
- });
-
- expect(vm.$el.textContent).toContain('Hello World');
- });
-
- it('should show message and emoji', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.status = { emoji: 'basketball_player', message_html: 'Hello World' };
-
- vm = mountComponent(UserPopover, {
- ...DEFAULT_PROPS,
- target: document.querySelector('.js-user-link'),
- status: { emoji: 'basketball_player', message_html: 'Hello World' },
- });
-
- expect(vm.$el.textContent).toContain('Hello World');
- expect(vm.$el.innerHTML).toContain('<gl-emoji data-name="basketball_player"');
- });
- });
-});
diff --git a/spec/lib/api/helpers/pagination_spec.rb b/spec/lib/api/helpers/pagination_spec.rb
index b57adb46385..040ff1a8ebe 100644
--- a/spec/lib/api/helpers/pagination_spec.rb
+++ b/spec/lib/api/helpers/pagination_spec.rb
@@ -3,399 +3,20 @@
require 'spec_helper'
describe API::Helpers::Pagination do
- let(:resource) { Project.all }
- let(:custom_port) { 8080 }
- let(:incoming_api_projects_url) { "#{Gitlab.config.gitlab.url}:#{custom_port}/api/v4/projects" }
+ subject { Class.new.include(described_class).new }
- before do
- stub_config_setting(port: custom_port)
- end
-
- subject do
- Class.new.include(described_class).new
- end
-
- describe '#paginate (keyset pagination)' do
- let(:value) { spy('return value') }
- let(:base_query) do
- {
- pagination: 'keyset',
- foo: 'bar',
- bar: 'baz'
- }
- end
- let(:query) { base_query }
-
- before do
- allow(subject).to receive(:header).and_return(value)
- allow(subject).to receive(:params).and_return(query)
- allow(subject).to receive(:request).and_return(double(url: "#{incoming_api_projects_url}?#{query.to_query}"))
- end
-
- context 'when resource can be paginated' do
- let!(:projects) do
- [
- create(:project, name: 'One'),
- create(:project, name: 'Two'),
- create(:project, name: 'Three')
- ].sort_by { |e| -e.id } # sort by id desc (this is the default sort order for the API)
- end
-
- describe 'first page' do
- let(:query) { base_query.merge(per_page: 2) }
-
- it 'returns appropriate amount of resources' do
- expect(subject.paginate(resource).count).to eq 2
- end
-
- it 'returns the first two records (by id desc)' do
- expect(subject.paginate(resource)).to eq(projects[0..1])
- end
-
- it 'adds appropriate headers' do
- expect_header('X-Per-Page', '2')
- expect_header('X-Next-Page', "#{incoming_api_projects_url}?#{query.merge(ks_prev_id: projects[1].id).to_query}")
-
- expect_header('Link', anything) do |_key, val|
- expect(val).to include('rel="next"')
- end
-
- subject.paginate(resource)
- end
- end
-
- describe 'second page' do
- let(:query) { base_query.merge(per_page: 2, ks_prev_id: projects[1].id) }
-
- it 'returns appropriate amount of resources' do
- expect(subject.paginate(resource).count).to eq 1
- end
-
- it 'returns the third record' do
- expect(subject.paginate(resource)).to eq(projects[2..2])
- end
-
- it 'adds appropriate headers' do
- expect_header('X-Per-Page', '2')
- expect_header('X-Next-Page', "#{incoming_api_projects_url}?#{query.merge(ks_prev_id: projects[2].id).to_query}")
-
- expect_header('Link', anything) do |_key, val|
- expect(val).to include('rel="next"')
- end
-
- subject.paginate(resource)
- end
- end
-
- describe 'third page' do
- let(:query) { base_query.merge(per_page: 2, ks_prev_id: projects[2].id) }
-
- it 'returns appropriate amount of resources' do
- expect(subject.paginate(resource).count).to eq 0
- end
-
- it 'adds appropriate headers' do
- expect_header('X-Per-Page', '2')
- expect_no_header('X-Next-Page')
- expect(subject).not_to receive(:header).with('Link')
-
- subject.paginate(resource)
- end
- end
-
- context 'if order' do
- context 'is not present' do
- let(:query) { base_query.merge(per_page: 2) }
-
- it 'is not present it adds default order(:id) desc' do
- resource.order_values = []
-
- paginated_relation = subject.paginate(resource)
-
- expect(resource.order_values).to be_empty
- expect(paginated_relation.order_values).to be_present
- expect(paginated_relation.order_values.size).to eq(1)
- expect(paginated_relation.order_values.first).to be_descending
- expect(paginated_relation.order_values.first.expr.name).to eq 'id'
- end
- end
-
- context 'is present' do
- let(:resource) { Project.all.order(name: :desc) }
- let!(:projects) do
- [
- create(:project, name: 'One'),
- create(:project, name: 'Two'),
- create(:project, name: 'Three'),
- create(:project, name: 'Three'), # Note the duplicate name
- create(:project, name: 'Four'),
- create(:project, name: 'Five'),
- create(:project, name: 'Six')
- ]
-
- # if we sort this by name descending, id descending, this yields:
- # {
- # 2 => "Two",
- # 4 => "Three",
- # 3 => "Three",
- # 7 => "Six",
- # 1 => "One",
- # 5 => "Four",
- # 6 => "Five"
- # }
- #
- # (key is the id)
- end
-
- it 'also orders by primary key' do
- paginated_relation = subject.paginate(resource)
-
- expect(paginated_relation.order_values).to be_present
- expect(paginated_relation.order_values.size).to eq(2)
- expect(paginated_relation.order_values.first).to be_descending
- expect(paginated_relation.order_values.first.expr.name).to eq 'name'
- expect(paginated_relation.order_values.second).to be_descending
- expect(paginated_relation.order_values.second.expr.name).to eq 'id'
- end
-
- it 'returns the right records (first page)' do
- result = subject.paginate(resource)
-
- expect(result.first).to eq(projects[1])
- expect(result.second).to eq(projects[3])
- end
-
- describe 'second page' do
- let(:query) { base_query.merge(ks_prev_id: projects[3].id, ks_prev_name: projects[3].name, per_page: 2) }
-
- it 'returns the right records (second page)' do
- result = subject.paginate(resource)
-
- expect(result.first).to eq(projects[2])
- expect(result.second).to eq(projects[6])
- end
-
- it 'returns the right link to the next page' do
- expect_header('X-Per-Page', '2')
- expect_header('X-Next-Page', "#{incoming_api_projects_url}?#{query.merge(ks_prev_id: projects[6].id, ks_prev_name: projects[6].name).to_query}")
- expect_header('Link', anything) do |_key, val|
- expect(val).to include('rel="next"')
- end
-
- subject.paginate(resource)
- end
- end
-
- describe 'third page' do
- let(:query) { base_query.merge(ks_prev_id: projects[6].id, ks_prev_name: projects[6].name, per_page: 5) }
-
- it 'returns the right records (third page), note increased per_page' do
- result = subject.paginate(resource)
-
- expect(result.size).to eq(3)
- expect(result.first).to eq(projects[0])
- expect(result.second).to eq(projects[4])
- expect(result.last).to eq(projects[5])
- end
- end
- end
- end
- end
- end
-
- describe '#paginate (default offset-based pagination)' do
- let(:value) { spy('return value') }
- let(:base_query) { { foo: 'bar', bar: 'baz' } }
- let(:query) { base_query }
-
- before do
- allow(subject).to receive(:header).and_return(value)
- allow(subject).to receive(:params).and_return(query)
- allow(subject).to receive(:request).and_return(double(url: "#{incoming_api_projects_url}?#{query.to_query}"))
- end
-
- context 'when resource can be paginated' do
- before do
- create_list(:project, 3)
- end
-
- describe 'first page' do
- shared_examples 'response with pagination headers' do
- it 'adds appropriate headers' do
- expect_header('X-Total', '3')
- expect_header('X-Total-Pages', '2')
- expect_header('X-Per-Page', '2')
- expect_header('X-Page', '1')
- expect_header('X-Next-Page', '2')
- expect_header('X-Prev-Page', '')
-
- expect_header('Link', anything) do |_key, val|
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first"))
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="last"))
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="next"))
- expect(val).not_to include('rel="prev"')
- end
-
- subject.paginate(resource)
- end
- end
-
- shared_examples 'paginated response' do
- it 'returns appropriate amount of resources' do
- expect(subject.paginate(resource).count).to eq 2
- end
-
- it 'executes only one SELECT COUNT query' do
- expect { subject.paginate(resource) }.to make_queries_matching(/SELECT COUNT/, 1)
- end
- end
-
- let(:query) { base_query.merge(page: 1, per_page: 2) }
-
- context 'when the api_kaminari_count_with_limit feature flag is unset' do
- it_behaves_like 'paginated response'
- it_behaves_like 'response with pagination headers'
- end
-
- context 'when the api_kaminari_count_with_limit feature flag is disabled' do
- before do
- stub_feature_flags(api_kaminari_count_with_limit: false)
- end
-
- it_behaves_like 'paginated response'
- it_behaves_like 'response with pagination headers'
- end
-
- context 'when the api_kaminari_count_with_limit feature flag is enabled' do
- before do
- stub_feature_flags(api_kaminari_count_with_limit: true)
- end
-
- context 'when resources count is less than MAX_COUNT_LIMIT' do
- before do
- stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_LIMIT", 4)
- end
-
- it_behaves_like 'paginated response'
- it_behaves_like 'response with pagination headers'
- end
-
- context 'when resources count is more than MAX_COUNT_LIMIT' do
- before do
- stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_LIMIT", 2)
- end
-
- it_behaves_like 'paginated response'
-
- it 'does not return the X-Total and X-Total-Pages headers' do
- expect_no_header('X-Total')
- expect_no_header('X-Total-Pages')
- expect_header('X-Per-Page', '2')
- expect_header('X-Page', '1')
- expect_header('X-Next-Page', '2')
- expect_header('X-Prev-Page', '')
+ describe '#paginate' do
+ let(:relation) { double("relation") }
+ let(:offset_pagination) { double("offset pagination") }
+ let(:expected_result) { double("result") }
- expect_header('Link', anything) do |_key, val|
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first"))
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="next"))
- expect(val).not_to include('rel="last"')
- expect(val).not_to include('rel="prev"')
- end
+ it 'delegates to OffsetPagination' do
+ expect(::Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(offset_pagination)
+ expect(offset_pagination).to receive(:paginate).with(relation).and_return(expected_result)
- subject.paginate(resource)
- end
- end
- end
- end
+ result = subject.paginate(relation)
- describe 'second page' do
- let(:query) { base_query.merge(page: 2, per_page: 2) }
-
- it 'returns appropriate amount of resources' do
- expect(subject.paginate(resource).count).to eq 1
- end
-
- it 'adds appropriate headers' do
- expect_header('X-Total', '3')
- expect_header('X-Total-Pages', '2')
- expect_header('X-Per-Page', '2')
- expect_header('X-Page', '2')
- expect_header('X-Next-Page', '')
- expect_header('X-Prev-Page', '1')
-
- expect_header('Link', anything) do |_key, val|
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first"))
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="last"))
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="prev"))
- expect(val).not_to include('rel="next"')
- end
-
- subject.paginate(resource)
- end
- end
-
- context 'if order' do
- it 'is not present it adds default order(:id) if no order is present' do
- resource.order_values = []
-
- paginated_relation = subject.paginate(resource)
-
- expect(resource.order_values).to be_empty
- expect(paginated_relation.order_values).to be_present
- expect(paginated_relation.order_values.first).to be_ascending
- expect(paginated_relation.order_values.first.expr.name).to eq 'id'
- end
-
- it 'is present it does not add anything' do
- paginated_relation = subject.paginate(resource.order(created_at: :desc))
-
- expect(paginated_relation.order_values).to be_present
- expect(paginated_relation.order_values.first).to be_descending
- expect(paginated_relation.order_values.first.expr.name).to eq 'created_at'
- end
- end
+ expect(result).to eq(expected_result)
end
-
- context 'when resource empty' do
- describe 'first page' do
- let(:query) { base_query.merge(page: 1, per_page: 2) }
-
- it 'returns appropriate amount of resources' do
- expect(subject.paginate(resource).count).to eq 0
- end
-
- it 'adds appropriate headers' do
- expect_header('X-Total', '0')
- expect_header('X-Total-Pages', '1')
- expect_header('X-Per-Page', '2')
- expect_header('X-Page', '1')
- expect_header('X-Next-Page', '')
- expect_header('X-Prev-Page', '')
-
- expect_header('Link', anything) do |_key, val|
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first"))
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="last"))
- expect(val).not_to include('rel="prev"')
- expect(val).not_to include('rel="next"')
- expect(val).not_to include('page=0')
- end
-
- subject.paginate(resource)
- end
- end
- end
- end
-
- def expect_header(*args, &block)
- expect(subject).to receive(:header).with(*args, &block)
- end
-
- def expect_no_header(*args, &block)
- expect(subject).not_to receive(:header).with(*args)
- end
-
- def expect_message(method)
- expect(subject).to receive(method)
- .at_least(:once).and_return(value)
end
end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 0624c25e734..81c4563feb6 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -174,4 +174,18 @@ describe API::Helpers do
end
end
end
+
+ describe '#track_event' do
+ it "creates a gitlab tracking event" do
+ expect(Gitlab::Tracking).to receive(:event).with('foo', 'my_event', {})
+
+ subject.track_event('my_event', category: 'foo')
+ end
+
+ it "logs an exception" do
+ expect(Rails.logger).to receive(:warn).with(/Tracking event failed/)
+
+ subject.track_event('my_event', category: nil)
+ end
+ end
end
diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repository_spec.rb
index bf827fb3914..5f120f258cd 100644
--- a/spec/lib/backup/repository_spec.rb
+++ b/spec/lib/backup/repository_spec.rb
@@ -70,7 +70,7 @@ describe Backup::Repository do
end
context 'restoring object pools' do
- it 'schedules restoring of the pool' do
+ it 'schedules restoring of the pool', :sidekiq_might_not_need_inline do
pool_repository = create(:pool_repository, :failed)
pool_repository.delete_object_pool
diff --git a/spec/lib/banzai/filter/asset_proxy_filter_spec.rb b/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
index 0c4ccbf28f4..ff2346fe1ba 100644
--- a/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
+++ b/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Banzai::Filter::AssetProxyFilter do
diff --git a/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
new file mode 100644
index 00000000000..fd6f8816b63
--- /dev/null
+++ b/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Filter::InlineGrafanaMetricsFilter do
+ include FilterSpecHelper
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:grafana_integration) { create(:grafana_integration, project: project) }
+
+ let(:input) { %(<a href="#{url}">example</a>) }
+ let(:doc) { filter(input) }
+
+ let(:url) { grafana_integration.grafana_url + dashboard_path }
+ let(:dashboard_path) do
+ '/d/XDaNK6amz/gitlab-omnibus-redis' \
+ '?from=1570397739557&to=1570484139557' \
+ '&var-instance=All&panelId=14'
+ end
+
+ it 'appends a metrics charts placeholder with dashboard url after metrics links' do
+ node = doc.at_css('.js-render-metrics')
+ expect(node).to be_present
+
+ dashboard_url = urls.project_grafana_api_metrics_dashboard_url(
+ project,
+ embedded: true,
+ grafana_url: url,
+ start: "2019-10-06T21:35:39Z",
+ end: "2019-10-07T21:35:39Z"
+ )
+
+ expect(node.attribute('data-dashboard-url').to_s).to eq(dashboard_url)
+ end
+
+ context 'when the dashboard link is part of a paragraph' do
+ let(:paragraph) { %(This is an <a href="#{url}">example</a> of metrics.) }
+ let(:input) { %(<p>#{paragraph}</p>) }
+
+ it 'appends the charts placeholder after the enclosing paragraph' do
+ expect(unescape(doc.at_css('p').to_s)).to include(paragraph)
+ expect(doc.at_css('.js-render-metrics')).to be_present
+ end
+ end
+
+ context 'when grafana is not configured' do
+ before do
+ allow(project).to receive(:grafana_integration).and_return(nil)
+ end
+
+ it 'leaves the markdown unchanged' do
+ expect(unescape(doc.to_s)).to eq(input)
+ end
+ end
+
+ context 'when parameters are missing' do
+ let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis' }
+
+ it 'leaves the markdown unchanged' do
+ expect(unescape(doc.to_s)).to eq(input)
+ end
+ end
+
+ private
+
+ # Nokogiri escapes the URLs, but we don't care about that
+ # distinction for the purposes of this filter
+ def unescape(html)
+ CGI.unescapeHTML(html)
+ end
+end
diff --git a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
index a99cd7d6076..745b9133529 100644
--- a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
+++ b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
@@ -18,30 +18,48 @@ describe Banzai::Filter::InlineMetricsRedactorFilter do
end
context 'with a metrics charts placeholder' do
- let(:input) { %(<div class="js-render-metrics" data-dashboard-url="#{url}"></div>) }
+ shared_examples_for 'a supported metrics dashboard url' do
+ context 'no user is logged in' do
+ it 'redacts the placeholder' do
+ expect(doc.to_s).to be_empty
+ end
+ end
- context 'no user is logged in' do
- it 'redacts the placeholder' do
- expect(doc.to_s).to be_empty
+ context 'the user does not have permission do see charts' do
+ let(:doc) { filter(input, current_user: build(:user)) }
+
+ it 'redacts the placeholder' do
+ expect(doc.to_s).to be_empty
+ end
end
- end
- context 'the user does not have permission do see charts' do
- let(:doc) { filter(input, current_user: build(:user)) }
+ context 'the user has requisite permissions' do
+ let(:user) { create(:user) }
+ let(:doc) { filter(input, current_user: user) }
- it 'redacts the placeholder' do
- expect(doc.to_s).to be_empty
+ it 'leaves the placeholder' do
+ project.add_maintainer(user)
+
+ expect(doc.to_s).to eq input
+ end
end
end
- context 'the user has requisite permissions' do
- let(:user) { create(:user) }
- let(:doc) { filter(input, current_user: user) }
+ let(:input) { %(<div class="js-render-metrics" data-dashboard-url="#{url}"></div>) }
- it 'leaves the placeholder' do
- project.add_maintainer(user)
+ it_behaves_like 'a supported metrics dashboard url'
+
+ context 'for a grafana dashboard' do
+ let(:url) { urls.project_grafana_api_metrics_dashboard_url(project, embedded: true) }
+
+ it_behaves_like 'a supported metrics dashboard url'
+ end
- expect(doc.to_s).to eq input
+ context 'for an internal non-dashboard url' do
+ let(:url) { urls.project_url(project) }
+
+ it 'leaves the placeholder' do
+ expect(doc.to_s).to be_empty
end
end
end
diff --git a/spec/lib/banzai/filter/video_link_filter_spec.rb b/spec/lib/banzai/filter/video_link_filter_spec.rb
index a395b021f32..c324c36fe4d 100644
--- a/spec/lib/banzai/filter/video_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/video_link_filter_spec.rb
@@ -32,7 +32,7 @@ describe Banzai::Filter::VideoLinkFilter do
expect(video.name).to eq 'video'
expect(video['src']).to eq src
- expect(video['width']).to eq "100%"
+ expect(video['width']).to eq "400"
expect(paragraph.name).to eq 'p'
diff --git a/spec/lib/bitbucket/representation/pull_request_spec.rb b/spec/lib/bitbucket/representation/pull_request_spec.rb
index 70b51b8efec..6a9df0e5099 100644
--- a/spec/lib/bitbucket/representation/pull_request_spec.rb
+++ b/spec/lib/bitbucket/representation/pull_request_spec.rb
@@ -20,6 +20,7 @@ describe Bitbucket::Representation::PullRequest do
describe '#state' do
it { expect(described_class.new({ 'state' => 'MERGED' }).state).to eq('merged') }
it { expect(described_class.new({ 'state' => 'DECLINED' }).state).to eq('closed') }
+ it { expect(described_class.new({ 'state' => 'SUPERSEDED' }).state).to eq('closed') }
it { expect(described_class.new({}).state).to eq('opened') }
end
diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb
index 3782c30e88a..a493b96b1e4 100644
--- a/spec/lib/container_registry/client_spec.rb
+++ b/spec/lib/container_registry/client_spec.rb
@@ -99,8 +99,8 @@ describe ContainerRegistry::Client do
stub_upload('path', 'content', 'sha256:123', 400)
end
- it 'returns nil' do
- expect(subject).to be nil
+ it 'returns a failure' do
+ expect(subject).not_to be_success
end
end
end
@@ -125,6 +125,14 @@ describe ContainerRegistry::Client do
expect(subject).to eq(result_manifest)
end
+
+ context 'when upload fails' do
+ before do
+ stub_upload('path', "{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3', 500)
+ end
+
+ it { is_expected.to be nil }
+ end
end
describe '#put_tag' do
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 7c65525b8dc..415a6e62374 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -58,7 +58,7 @@ module Gitlab
},
'image with onerror' => {
input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]',
- output: "<div>\n<p><span><img src=\"https://localhost.com/image.png\" alt='Alt text\" onerror=\"alert(7)'></span></p>\n</div>"
+ output: "<div>\n<p><span><a class=\"no-attachment-icon\" href=\"https://localhost.com/image.png\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt='Alt text\" onerror=\"alert(7)' class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>"
},
'fenced code with inline script' => {
input: '```mypre"><script>alert(3)</script>',
@@ -73,6 +73,20 @@ module Gitlab
end
end
+ context "images" do
+ it "does lazy load and link image" do
+ input = 'image:https://localhost.com/image.png[]'
+ output = "<div>\n<p><span><a class=\"no-attachment-icon\" href=\"https://localhost.com/image.png\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"image\" class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>"
+ expect(render(input, context)).to include(output)
+ end
+
+ it "does not automatically link image if link is explicitly defined" do
+ input = 'image:https://localhost.com/image.png[link=https://gitlab.com]'
+ output = "<div>\n<p><span><a href=\"https://gitlab.com\" rel=\"nofollow noreferrer noopener\" target=\"_blank\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"image\" class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>"
+ expect(render(input, context)).to include(output)
+ end
+ end
+
context 'with admonition' do
it 'preserves classes' do
input = <<~ADOC
@@ -107,7 +121,7 @@ module Gitlab
ADOC
output = <<~HTML
- <h2>Title</h2>
+ <h2>Title</h2>
HTML
expect(render(input, context)).to include(output.strip)
@@ -149,15 +163,15 @@ module Gitlab
ADOC
output = <<~HTML
- <div>
- <p>This paragraph has a footnote.<sup>[<a id="_footnoteref_1" href="#_footnotedef_1" title="View footnote.">1</a>]</sup></p>
- </div>
- <div>
- <hr>
- <div id="_footnotedef_1">
- <a href="#_footnoteref_1">1</a>. This is the text of the footnote.
- </div>
- </div>
+ <div>
+ <p>This paragraph has a footnote.<sup>[<a id="_footnoteref_1" href="#_footnotedef_1" title="View footnote.">1</a>]</sup></p>
+ </div>
+ <div>
+ <hr>
+ <div id="_footnotedef_1">
+ <a href="#_footnoteref_1">1</a>. This is the text of the footnote.
+ </div>
+ </div>
HTML
expect(render(input, context)).to include(output.strip)
@@ -183,34 +197,34 @@ module Gitlab
ADOC
output = <<~HTML
- <h1>Title</h1>
- <div>
- <h2 id="user-content-first-section">
- <a class="anchor" href="#user-content-first-section"></a>First section</h2>
- <div>
- <div>
- <p>This is the first section.</p>
- </div>
- </div>
- </div>
- <div>
- <h2 id="user-content-second-section">
- <a class="anchor" href="#user-content-second-section"></a>Second section</h2>
- <div>
- <div>
- <p>This is the second section.</p>
- </div>
- </div>
- </div>
- <div>
- <h2 id="user-content-thunder">
- <a class="anchor" href="#user-content-thunder"></a>Thunder âš¡ !</h2>
- <div>
- <div>
- <p>This is the third section.</p>
- </div>
- </div>
- </div>
+ <h1>Title</h1>
+ <div>
+ <h2 id="user-content-first-section">
+ <a class="anchor" href="#user-content-first-section"></a>First section</h2>
+ <div>
+ <div>
+ <p>This is the first section.</p>
+ </div>
+ </div>
+ </div>
+ <div>
+ <h2 id="user-content-second-section">
+ <a class="anchor" href="#user-content-second-section"></a>Second section</h2>
+ <div>
+ <div>
+ <p>This is the second section.</p>
+ </div>
+ </div>
+ </div>
+ <div>
+ <h2 id="user-content-thunder">
+ <a class="anchor" href="#user-content-thunder"></a>Thunder âš¡ !</h2>
+ <div>
+ <div>
+ <p>This is the third section.</p>
+ </div>
+ </div>
+ </div>
HTML
expect(render(input, context)).to include(output.strip)
diff --git a/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb b/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
index 05541972f87..adb8e138ca7 100644
--- a/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Auth::LDAP::AuthHash do
@@ -91,7 +93,7 @@ describe Gitlab::Auth::LDAP::AuthHash do
let(:given_uid) { 'uid=John Smith,ou=People,dc=example,dc=com' }
before do
- raw_info[:uid] = ['JOHN']
+ raw_info[:uid] = [+'JOHN']
end
it 'enabled the username attribute is lower cased' do
diff --git a/spec/lib/gitlab/auth/ldap/config_spec.rb b/spec/lib/gitlab/auth/ldap/config_spec.rb
index 577dfe51949..e4a90d4018d 100644
--- a/spec/lib/gitlab/auth/ldap/config_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/config_spec.rb
@@ -535,4 +535,23 @@ AtlErSqafbECNDSwS5BX8yDpu5yRBJ4xegO/rNlmb8ICRYkuJapD1xXicFOsmfUK
end
end
end
+
+ describe 'sign_in_enabled?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:enabled, :prevent_ldap_sign_in, :result) do
+ true | false | true
+ 'true' | false | true
+ true | true | false
+ false | nil | false
+ end
+
+ with_them do
+ it do
+ stub_ldap_setting(enabled: enabled, prevent_ldap_sign_in: prevent_ldap_sign_in)
+
+ expect(described_class.sign_in_enabled?).to eq(result)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth/ldap/person_spec.rb b/spec/lib/gitlab/auth/ldap/person_spec.rb
index 1527fe60fb9..985732e69f9 100644
--- a/spec/lib/gitlab/auth/ldap/person_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/person_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Auth::LDAP::Person do
@@ -135,7 +137,7 @@ describe Gitlab::Auth::LDAP::Person do
let(:username_attribute) { 'uid' }
before do
- entry[username_attribute] = 'JOHN'
+ entry[username_attribute] = +'JOHN'
@person = described_class.new(entry, 'ldapmain')
end
diff --git a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
index c1eaf1d3433..f2de73d5aea 100644
--- a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
+++ b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
@@ -91,15 +91,26 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover do
end
end
- context 'when no model found for the upload' do
+ context 'when no note found for the upload' do
before do
- legacy_upload.model = nil
+ legacy_upload.model_id = nil
+ legacy_upload.model_type = 'Note'
expect_error_log
end
it_behaves_like 'legacy upload deletion'
end
+ context 'when upload does not belong to a note' do
+ before do
+ legacy_upload.model = create(:appearance)
+ end
+
+ it 'does not remove the upload' do
+ expect { described_class.new(legacy_upload).execute }.not_to change { Upload.count }
+ end
+ end
+
context 'when the upload move fails' do
before do
expect(FileUploader).to receive(:copy_to).and_raise('failed')
diff --git a/spec/lib/gitlab/background_migration/legacy_uploads_migrator_spec.rb b/spec/lib/gitlab/background_migration/legacy_uploads_migrator_spec.rb
index cabca3dbef9..85187d039c1 100644
--- a/spec/lib/gitlab/background_migration/legacy_uploads_migrator_spec.rb
+++ b/spec/lib/gitlab/background_migration/legacy_uploads_migrator_spec.rb
@@ -35,6 +35,8 @@ describe Gitlab::BackgroundMigration::LegacyUploadsMigrator do
let!(:legacy_upload_no_file) { create_upload(note2, false) }
let!(:legacy_upload_legacy_project) { create_upload(note_legacy) }
+ let!(:appearance) { create(:appearance, :with_logo) }
+
let(:start_id) { 1 }
let(:end_id) { 10000 }
@@ -52,12 +54,18 @@ describe Gitlab::BackgroundMigration::LegacyUploadsMigrator do
expect(File.exist?(legacy_upload_legacy_project.absolute_path)).to be_falsey
end
- it 'removes all AttachmentUploader records' do
- expect { subject }.to change { Upload.where(uploader: 'AttachmentUploader').count }.from(3).to(0)
+ it 'removes all Note AttachmentUploader records' do
+ expect { subject }.to change { Upload.where(uploader: 'AttachmentUploader').count }.from(4).to(1)
end
it 'creates new uploads for successfully migrated records' do
expect { subject }.to change { Upload.where(uploader: 'FileUploader').count }.from(0).to(2)
end
+
+ it 'does not remove appearance uploads' do
+ subject
+
+ expect(appearance.logo.file).to exist
+ end
end
# rubocop: enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb b/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb
index f877e8cc1b8..399db4ac259 100644
--- a/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb
+++ b/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb
@@ -33,7 +33,7 @@ describe ScheduleCalculateWikiSizes, :migration, :sidekiq do
end
end
- it 'calculates missing wiki sizes' do
+ it 'calculates missing wiki sizes', :sidekiq_might_not_need_inline do
expect(project_statistics.find_by(id: 2).wiki_size).to be_nil
expect(project_statistics.find_by(id: 3).wiki_size).to be_nil
diff --git a/spec/lib/gitlab/badge/pipeline/status_spec.rb b/spec/lib/gitlab/badge/pipeline/status_spec.rb
index 684c6829879..ab8d1f0ec5b 100644
--- a/spec/lib/gitlab/badge/pipeline/status_spec.rb
+++ b/spec/lib/gitlab/badge/pipeline/status_spec.rb
@@ -26,7 +26,7 @@ describe Gitlab::Badge::Pipeline::Status do
end
end
- context 'pipeline exists' do
+ context 'pipeline exists', :sidekiq_might_not_need_inline do
let!(:pipeline) { create_pipeline(project, sha, branch) }
context 'pipeline success' do
diff --git a/spec/lib/gitlab/bare_repository_import/importer_spec.rb b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
index 2fb9f1a0a08..ddb1d3cea21 100644
--- a/spec/lib/gitlab/bare_repository_import/importer_spec.rb
+++ b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
@@ -90,7 +90,7 @@ describe Gitlab::BareRepositoryImport::Importer, :seed_helper do
hook_path = File.join(repo_path, 'hooks')
expect(gitlab_shell.repository_exists?(project.repository_storage, repo_path)).to be(true)
- expect(gitlab_shell.exists?(project.repository_storage, hook_path)).to be(true)
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, hook_path)).to be(true)
end
context 'hashed storage enabled' do
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index 7f7a285c453..b0d07c6e0b0 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -158,6 +158,7 @@ describe Gitlab::BitbucketImport::Importer do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
+ expect(merge_request.state).to eq('merged')
expect(merge_request.notes.count).to eq(2)
expect(merge_request.notes.map(&:discussion_id).uniq.count).to eq(1)
diff --git a/spec/lib/gitlab/checks/lfs_integrity_spec.rb b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
index 88e8f5d74d1..505f117034e 100644
--- a/spec/lib/gitlab/checks/lfs_integrity_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
@@ -58,7 +58,7 @@ describe Gitlab::Checks::LfsIntegrity do
end
end
- context 'for forked project' do
+ context 'for forked project', :sidekiq_might_not_need_inline do
let(:parent_project) { create(:project, :repository) }
let(:project) { fork_project(parent_project, nil, repository: true) }
diff --git a/spec/lib/gitlab/ci/ansi2json/style_spec.rb b/spec/lib/gitlab/ci/ansi2json/style_spec.rb
index 88a0ca35859..5110c215415 100644
--- a/spec/lib/gitlab/ci/ansi2json/style_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json/style_spec.rb
@@ -143,6 +143,7 @@ describe Gitlab::Ci::Ansi2json::Style do
[[], %w[106], 'term-bg-l-cyan', 'sets bg color light cyan'],
[[], %w[107], 'term-bg-l-white', 'sets bg color light white'],
# reset
+ [%w[1], %w[], '', 'resets style from format bold'],
[%w[1], %w[0], '', 'resets style from format bold'],
[%w[1 3], %w[0], '', 'resets style from format bold and italic'],
[%w[1 3 term-fg-l-red term-bg-yellow], %w[0], '', 'resets all formats and colors'],
diff --git a/spec/lib/gitlab/ci/ansi2json_spec.rb b/spec/lib/gitlab/ci/ansi2json_spec.rb
index 3c6bc46436b..124379fa321 100644
--- a/spec/lib/gitlab/ci/ansi2json_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json_spec.rb
@@ -12,11 +12,26 @@ describe Gitlab::Ci::Ansi2json do
])
end
- it 'adds new line in a separate element' do
- expect(convert_json("Hello\nworld")).to eq([
- { offset: 0, content: [{ text: 'Hello' }] },
- { offset: 6, content: [{ text: 'world' }] }
- ])
+ context 'new lines' do
+ it 'adds new line when encountering \n' do
+ expect(convert_json("Hello\nworld")).to eq([
+ { offset: 0, content: [{ text: 'Hello' }] },
+ { offset: 6, content: [{ text: 'world' }] }
+ ])
+ end
+
+ it 'adds new line when encountering \r\n' do
+ expect(convert_json("Hello\r\nworld")).to eq([
+ { offset: 0, content: [{ text: 'Hello' }] },
+ { offset: 7, content: [{ text: 'world' }] }
+ ])
+ end
+
+ it 'replace the current line when encountering \r' do
+ expect(convert_json("Hello\rworld")).to eq([
+ { offset: 0, content: [{ text: 'world' }] }
+ ])
+ end
end
it 'recognizes color changing ANSI sequences' do
@@ -113,10 +128,6 @@ describe Gitlab::Ci::Ansi2json do
content: [],
section_duration: '01:03',
section: 'prepare-script'
- },
- {
- offset: 63,
- content: []
}
])
end
@@ -134,10 +145,6 @@ describe Gitlab::Ci::Ansi2json do
content: [],
section: 'prepare-script',
section_duration: '01:03'
- },
- {
- offset: 56,
- content: []
}
])
end
@@ -157,7 +164,7 @@ describe Gitlab::Ci::Ansi2json do
section_duration: '01:03'
},
{
- offset: 49,
+ offset: 91,
content: [{ text: 'world' }]
}
])
@@ -198,7 +205,7 @@ describe Gitlab::Ci::Ansi2json do
expect(convert_json("#{section_start}hello")).to eq([
{
offset: 0,
- content: [{ text: "#{section_start.gsub("\033[0K", '')}hello" }]
+ content: [{ text: 'hello' }]
}
])
end
@@ -211,30 +218,26 @@ describe Gitlab::Ci::Ansi2json do
expect(convert_json("#{section_start}hello")).to eq([
{
offset: 0,
- content: [{ text: "#{section_start.gsub("\033[0K", '').gsub('<', '&lt;')}hello" }]
+ content: [{ text: 'hello' }]
}
])
end
end
- it 'prevents XSS injection' do
- trace = "#{section_start}section_end:1:2<script>alert('XSS Hack!');</script>#{section_end}"
+ it 'prints HTML tags as is' do
+ trace = "#{section_start}section_end:1:2<div>hello</div>#{section_end}"
expect(convert_json(trace)).to eq([
{
offset: 0,
- content: [{ text: "section_end:1:2&lt;script>alert('XSS Hack!');&lt;/script>" }],
+ content: [{ text: "section_end:1:2<div>hello</div>" }],
section: 'prepare-script',
section_header: true
},
{
- offset: 95,
+ offset: 75,
content: [],
section: 'prepare-script',
section_duration: '01:03'
- },
- {
- offset: 95,
- content: []
}
])
end
@@ -274,7 +277,7 @@ describe Gitlab::Ci::Ansi2json do
section_duration: '00:02'
},
{
- offset: 106,
+ offset: 155,
content: [{ text: 'baz' }],
section: 'prepare-script'
},
@@ -285,7 +288,7 @@ describe Gitlab::Ci::Ansi2json do
section_duration: '01:03'
},
{
- offset: 158,
+ offset: 200,
content: [{ text: 'world' }]
}
])
@@ -318,14 +321,10 @@ describe Gitlab::Ci::Ansi2json do
section_duration: '00:02'
},
{
- offset: 115,
+ offset: 164,
content: [],
section: 'prepare-script',
section_duration: '01:03'
- },
- {
- offset: 164,
- content: []
}
])
end
@@ -380,7 +379,7 @@ describe Gitlab::Ci::Ansi2json do
]
end
- it 'returns the full line' do
+ it 'returns the line since last partially processed line' do
expect(pass2.lines).to eq(lines)
expect(pass2.append).to be_truthy
end
@@ -399,7 +398,7 @@ describe Gitlab::Ci::Ansi2json do
]
end
- it 'returns the full line' do
+ it 'returns the line since last partially processed line' do
expect(pass2.lines).to eq(lines)
expect(pass2.append).to be_falsey
end
@@ -416,7 +415,7 @@ describe Gitlab::Ci::Ansi2json do
]
end
- it 'returns the full line' do
+ it 'returns a blank line and the next line' do
expect(pass2.lines).to eq(lines)
expect(pass2.append).to be_falsey
end
@@ -502,10 +501,6 @@ describe Gitlab::Ci::Ansi2json do
content: [],
section: 'prepare-script',
section_duration: '01:03'
- },
- {
- offset: 77,
- content: []
}
]
end
diff --git a/spec/lib/gitlab/ci/build/context/build_spec.rb b/spec/lib/gitlab/ci/build/context/build_spec.rb
new file mode 100644
index 00000000000..3adde213f59
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/context/build_spec.rb
@@ -0,0 +1,26 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Context::Build do
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:seed_attributes) { { 'name' => 'some-job' } }
+
+ let(:context) { described_class.new(pipeline, seed_attributes) }
+
+ describe '#variables' do
+ subject { context.variables }
+
+ it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') }
+ it { is_expected.to include('CI_PIPELINE_IID' => pipeline.iid.to_s) }
+ it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) }
+ it { is_expected.to include('CI_JOB_NAME' => 'some-job') }
+ it { is_expected.to include('CI_BUILD_REF_NAME' => 'master') }
+
+ context 'without passed build-specific attributes' do
+ let(:context) { described_class.new(pipeline) }
+
+ it { is_expected.to include('CI_JOB_NAME' => nil) }
+ it { is_expected.to include('CI_BUILD_REF_NAME' => 'master') }
+ it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/context/global_spec.rb b/spec/lib/gitlab/ci/build/context/global_spec.rb
new file mode 100644
index 00000000000..6bc8f862779
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/context/global_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Context::Global do
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:yaml_variables) { {} }
+
+ let(:context) { described_class.new(pipeline, yaml_variables: yaml_variables) }
+
+ describe '#variables' do
+ subject { context.variables }
+
+ it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') }
+ it { is_expected.to include('CI_PIPELINE_IID' => pipeline.iid.to_s) }
+ it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) }
+
+ it { is_expected.not_to have_key('CI_JOB_NAME') }
+ it { is_expected.not_to have_key('CI_BUILD_REF_NAME') }
+
+ context 'with passed yaml variables' do
+ let(:yaml_variables) { [{ key: 'SUPPORTED', value: 'parsed', public: true }] }
+
+ it { is_expected.to include('SUPPORTED' => 'parsed') }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/policy/variables_spec.rb b/spec/lib/gitlab/ci/build/policy/variables_spec.rb
index 7140c14facb..66f2cb640b9 100644
--- a/spec/lib/gitlab/ci/build/policy/variables_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/variables_spec.rb
@@ -16,7 +16,7 @@ describe Gitlab::Ci::Build::Policy::Variables do
let(:seed) do
double('build seed',
to_resource: ci_build,
- scoped_variables_hash: ci_build.scoped_variables_hash
+ variables: ci_build.scoped_variables_hash
)
end
@@ -91,7 +91,7 @@ describe Gitlab::Ci::Build::Policy::Variables do
let(:seed) do
double('bridge seed',
to_resource: bridge,
- scoped_variables_hash: ci_build.scoped_variables_hash
+ variables: ci_build.scoped_variables_hash
)
end
diff --git a/spec/lib/gitlab/ci/build/rules/rule_spec.rb b/spec/lib/gitlab/ci/build/rules/rule_spec.rb
index 99852bd4228..04cdaa9d0ae 100644
--- a/spec/lib/gitlab/ci/build/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule_spec.rb
@@ -1,10 +1,12 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Ci::Build::Rules::Rule do
let(:seed) do
double('build seed',
to_resource: ci_build,
- scoped_variables_hash: ci_build.scoped_variables_hash
+ variables: ci_build.scoped_variables_hash
)
end
diff --git a/spec/lib/gitlab/ci/build/rules_spec.rb b/spec/lib/gitlab/ci/build/rules_spec.rb
index d7793ebc806..1ebcc4f9414 100644
--- a/spec/lib/gitlab/ci/build/rules_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Ci::Build::Rules do
@@ -7,11 +9,11 @@ describe Gitlab::Ci::Build::Rules do
let(:seed) do
double('build seed',
to_resource: ci_build,
- scoped_variables_hash: ci_build.scoped_variables_hash
+ variables: ci_build.scoped_variables_hash
)
end
- let(:rules) { described_class.new(rule_list) }
+ let(:rules) { described_class.new(rule_list, default_when: 'on_success') }
describe '.new' do
let(:rules_ivar) { rules.instance_variable_get :@rule_list }
@@ -60,7 +62,7 @@ describe Gitlab::Ci::Build::Rules do
context 'with a specified default when:' do
let(:rule_list) { [{ if: '$VAR == null', when: 'always' }] }
- let(:rules) { described_class.new(rule_list, 'manual') }
+ let(:rules) { described_class.new(rule_list, default_when: 'manual') }
it 'sets @rule_list to an array of a single rule' do
expect(rules_ivar).to be_an(Array)
@@ -81,7 +83,7 @@ describe Gitlab::Ci::Build::Rules do
it { is_expected.to eq(described_class::Result.new('on_success')) }
context 'and when:manual set as the default' do
- let(:rules) { described_class.new(rule_list, 'manual') }
+ let(:rules) { described_class.new(rule_list, default_when: 'manual') }
it { is_expected.to eq(described_class::Result.new('manual')) }
end
@@ -93,7 +95,7 @@ describe Gitlab::Ci::Build::Rules do
it { is_expected.to eq(described_class::Result.new('never')) }
context 'and when:manual set as the default' do
- let(:rules) { described_class.new(rule_list, 'manual') }
+ let(:rules) { described_class.new(rule_list, default_when: 'manual') }
it { is_expected.to eq(described_class::Result.new('never')) }
end
@@ -157,7 +159,7 @@ describe Gitlab::Ci::Build::Rules do
it { is_expected.to eq(described_class::Result.new('never')) }
context 'and when:manual set as the default' do
- let(:rules) { described_class.new(rule_list, 'manual') }
+ let(:rules) { described_class.new(rule_list, default_when: 'manual') }
it 'does not return the default when:' do
expect(subject).to eq(described_class::Result.new('never'))
diff --git a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
index a7f457e0f5e..513a9b8f2b4 100644
--- a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
@@ -28,6 +28,14 @@ describe Gitlab::Ci::Config::Entry::Artifacts do
expect(entry.value).to eq config
end
end
+
+ context "when value includes 'expose_as' keyword" do
+ let(:config) { { paths: %w[results.txt], expose_as: "Test results" } }
+
+ it 'returns general artifact and report-type artifacts configuration' do
+ expect(entry.value).to eq config
+ end
+ end
end
context 'when entry value is not correct' do
@@ -58,6 +66,84 @@ describe Gitlab::Ci::Config::Entry::Artifacts do
.to include 'artifacts reports should be a hash'
end
end
+
+ context "when 'expose_as' is not a string" do
+ let(:config) { { paths: %w[results.txt], expose_as: 1 } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'artifacts expose as should be a string'
+ end
+ end
+
+ context "when 'expose_as' is too long" do
+ let(:config) { { paths: %w[results.txt], expose_as: 'A' * 101 } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'artifacts expose as is too long (maximum is 100 characters)'
+ end
+ end
+
+ context "when 'expose_as' is an empty string" do
+ let(:config) { { paths: %w[results.txt], expose_as: '' } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'artifacts expose as ' + Gitlab::Ci::Config::Entry::Artifacts::EXPOSE_AS_ERROR_MESSAGE
+ end
+ end
+
+ context "when 'expose_as' contains invalid characters" do
+ let(:config) do
+ { paths: %w[results.txt], expose_as: '<script>alert("xss");</script>' }
+ end
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'artifacts expose as ' + Gitlab::Ci::Config::Entry::Artifacts::EXPOSE_AS_ERROR_MESSAGE
+ end
+ end
+
+ context "when 'expose_as' is used without 'paths'" do
+ let(:config) { { expose_as: 'Test results' } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "artifacts paths can't be blank"
+ end
+ end
+
+ context "when 'paths' includes '*' and 'expose_as' is defined" do
+ let(:config) { { expose_as: 'Test results', paths: ['test.txt', 'test*.txt'] } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "artifacts paths can't contain '*' when used with 'expose_as'"
+ end
+ end
+ end
+
+ context 'when feature flag :ci_expose_arbitrary_artifacts_in_mr is disabled' do
+ before do
+ stub_feature_flags(ci_expose_arbitrary_artifacts_in_mr: false)
+ end
+
+ context 'when syntax is correct' do
+ let(:config) { { expose_as: 'Test results', paths: ['test.txt'] } }
+
+ it 'is valid' do
+ expect(entry.errors).to be_empty
+ end
+ end
+
+ context 'when syntax for :expose_as is incorrect' do
+ let(:config) { { paths: %w[results.txt], expose_as: '' } }
+
+ it 'is valid' do
+ expect(entry.errors).to be_empty
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index 9aab3664e1c..4fa0a57dc82 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -12,22 +12,53 @@ describe Gitlab::Ci::Config::Entry::Cache do
context 'when entry config value is correct' do
let(:policy) { nil }
+ let(:key) { 'some key' }
let(:config) do
- { key: 'some key',
+ { key: key,
untracked: true,
paths: ['some/path/'],
policy: policy }
end
describe '#value' do
- it 'returns hash value' do
- expect(entry.value).to eq(key: 'some key', untracked: true, paths: ['some/path/'], policy: 'pull-push')
+ shared_examples 'hash key value' do
+ it 'returns hash value' do
+ expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push')
+ end
+ end
+
+ it_behaves_like 'hash key value'
+
+ context 'with files' do
+ let(:key) { { files: ['a-file', 'other-file'] } }
+
+ it_behaves_like 'hash key value'
+ end
+
+ context 'with files and prefix' do
+ let(:key) { { files: ['a-file', 'other-file'], prefix: 'prefix-value' } }
+
+ it_behaves_like 'hash key value'
+ end
+
+ context 'with prefix' do
+ let(:key) { { prefix: 'prefix-value' } }
+
+ it 'key is nil' do
+ expect(entry.value).to match(a_hash_including(key: nil))
+ end
end
end
describe '#valid?' do
it { is_expected.to be_valid }
+
+ context 'with files' do
+ let(:key) { { files: ['a-file', 'other-file'] } }
+
+ it { is_expected.to be_valid }
+ end
end
context 'policy is pull-push' do
@@ -87,10 +118,44 @@ describe Gitlab::Ci::Config::Entry::Cache do
end
context 'when descendants are invalid' do
- let(:config) { { key: 1 } }
+ context 'with invalid keys' do
+ let(:config) { { key: 1 } }
- it 'reports error with descendants' do
- is_expected.to include 'key config should be a string or symbol'
+ it 'reports error with descendants' do
+ is_expected.to include 'key should be a hash, a string or a symbol'
+ end
+ end
+
+ context 'with empty key' do
+ let(:config) { { key: {} } }
+
+ it 'reports error with descendants' do
+ is_expected.to include 'key config missing required keys: files'
+ end
+ end
+
+ context 'with invalid files' do
+ let(:config) { { key: { files: 'a-file' } } }
+
+ it 'reports error with descendants' do
+ is_expected.to include 'key:files config should be an array of strings'
+ end
+ end
+
+ context 'with prefix without files' do
+ let(:config) { { key: { prefix: 'a-prefix' } } }
+
+ it 'reports error with descendants' do
+ is_expected.to include 'key config missing required keys: files'
+ end
+ end
+
+ context 'when there is an unknown key present' do
+ let(:config) { { key: { unknown: 'a-file' } } }
+
+ it 'reports error with descendants' do
+ is_expected.to include 'key config contains unknown keys: unknown'
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/commands_spec.rb b/spec/lib/gitlab/ci/config/entry/commands_spec.rb
index 269a3406913..8e7f9ab9706 100644
--- a/spec/lib/gitlab/ci/config/entry/commands_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/commands_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Commands do
let(:entry) { described_class.new(config) }
- context 'when entry config value is an array' do
+ context 'when entry config value is an array of strings' do
let(:config) { %w(ls pwd) }
describe '#value' do
@@ -37,13 +37,74 @@ describe Gitlab::Ci::Config::Entry::Commands do
end
end
- context 'when entry value is not valid' do
+ context 'when entry config value is array of arrays of strings' do
+ let(:config) { [['ls'], ['pwd', 'echo 1']] }
+
+ describe '#value' do
+ it 'returns array of strings' do
+ expect(entry.value).to eq ['ls', 'pwd', 'echo 1']
+ end
+ end
+
+ describe '#errors' do
+ it 'does not append errors' do
+ expect(entry.errors).to be_empty
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry config value is array of strings and arrays of strings' do
+ let(:config) { ['ls', ['pwd', 'echo 1']] }
+
+ describe '#value' do
+ it 'returns array of strings' do
+ expect(entry.value).to eq ['ls', 'pwd', 'echo 1']
+ end
+ end
+
+ describe '#errors' do
+ it 'does not append errors' do
+ expect(entry.errors).to be_empty
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry value is integer' do
let(:config) { 1 }
describe '#errors' do
it 'saves errors' do
expect(entry.errors)
- .to include 'commands config should be an array of strings or a string'
+ .to include 'commands config should be a string or an array containing strings and arrays of strings'
+ end
+ end
+ end
+
+ context 'when entry value is multi-level nested array' do
+ let(:config) { [['ls', ['echo 1']], 'pwd'] }
+
+ describe '#errors' do
+ it 'saves errors' do
+ expect(entry.errors)
+ .to include 'commands config should be a string or an array containing strings and arrays of strings'
+ end
+ end
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/default_spec.rb b/spec/lib/gitlab/ci/config/entry/default_spec.rb
index 27d63dbd407..dad4f408e50 100644
--- a/spec/lib/gitlab/ci/config/entry/default_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/default_spec.rb
@@ -5,6 +5,18 @@ require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Default do
let(:entry) { described_class.new(config) }
+ it_behaves_like 'with inheritable CI config' do
+ let(:inheritable_key) { nil }
+ let(:inheritable_class) { Gitlab::Ci::Config::Entry::Root }
+
+ # These are entries defined in Root
+ # that we know that we don't want to inherit
+ # as they do not have sense in context of Default
+ let(:ignored_inheritable_columns) do
+ %i[default include variables stages types workflow]
+ end
+ end
+
describe '.nodes' do
it 'returns a hash' do
expect(described_class.nodes).to be_a(Hash)
@@ -14,7 +26,7 @@ describe Gitlab::Ci::Config::Entry::Default do
it 'contains the expected node names' do
expect(described_class.nodes.keys)
.to match_array(%i[before_script image services
- after_script cache])
+ after_script cache interruptible])
end
end
end
@@ -87,7 +99,7 @@ describe Gitlab::Ci::Config::Entry::Default do
it 'raises error' do
expect { entry.compose!(deps) }.to raise_error(
- Gitlab::Ci::Config::Entry::Default::DuplicateError)
+ Gitlab::Ci::Config::Entry::Default::InheritError)
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/files_spec.rb b/spec/lib/gitlab/ci/config/entry/files_spec.rb
new file mode 100644
index 00000000000..2bebbd7b198
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/files_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Files do
+ let(:entry) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'when entry config value is valid' do
+ let(:config) { ['some/file', 'some/path/'] }
+
+ describe '#value' do
+ it 'returns key value' do
+ expect(entry.value).to eq config
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ describe '#errors' do
+ context 'when entry value is not an array' do
+ let(:config) { 'string' }
+
+ it 'saves errors' do
+ expect(entry.errors)
+ .to include 'files config should be an array of strings'
+ end
+ end
+
+ context 'when entry value is not an array of strings' do
+ let(:config) { [1] }
+
+ it 'saves errors' do
+ expect(entry.errors)
+ .to include 'files config should be an array of strings'
+ end
+ end
+
+ context 'when entry value contains more than two values' do
+ let(:config) { %w[file1 file2 file3] }
+
+ it 'saves errors' do
+ expect(entry.errors)
+ .to include 'files config has too many items (maximum is 2)'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 1c4887e87c4..fe83171c57a 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -5,14 +5,26 @@ require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Job do
let(:entry) { described_class.new(config, name: :rspec) }
+ it_behaves_like 'with inheritable CI config' do
+ let(:inheritable_key) { 'default' }
+ let(:inheritable_class) { Gitlab::Ci::Config::Entry::Default }
+
+ # These are entries defined in Default
+ # that we know that we don't want to inherit
+ # as they do not have sense in context of Job
+ let(:ignored_inheritable_columns) do
+ %i[]
+ end
+ end
+
describe '.nodes' do
context 'when filtering all the entry/node names' do
subject { described_class.nodes.keys }
let(:result) do
%i[before_script script stage type after_script cache
- image services only except rules variables artifacts
- environment coverage retry]
+ image services only except rules needs variables artifacts
+ environment coverage retry interruptible]
end
it { is_expected.to match_array result }
@@ -372,21 +384,6 @@ describe Gitlab::Ci::Config::Entry::Job do
end
context 'when has needs' do
- context 'that are not a array of strings' do
- let(:config) do
- {
- stage: 'test',
- script: 'echo',
- needs: 'build-job'
- }
- end
-
- it 'returns error about invalid type' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include 'job needs should be an array of strings'
- end
- end
-
context 'when have dependencies that are not subset of needs' do
let(:config) do
{
diff --git a/spec/lib/gitlab/ci/config/entry/key_spec.rb b/spec/lib/gitlab/ci/config/entry/key_spec.rb
index a7874447725..327607e2266 100644
--- a/spec/lib/gitlab/ci/config/entry/key_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/key_spec.rb
@@ -6,38 +6,38 @@ describe Gitlab::Ci::Config::Entry::Key do
let(:entry) { described_class.new(config) }
describe 'validations' do
- shared_examples 'key with slash' do
- it 'is invalid' do
- expect(entry).not_to be_valid
- end
+ it_behaves_like 'key entry validations', 'simple key'
- it 'reports errors with config value' do
- expect(entry.errors).to include 'key config cannot contain the "/" character'
- end
- end
+ context 'when entry config value is correct' do
+ context 'when key is a hash' do
+ let(:config) { { files: ['test'], prefix: 'something' } }
- shared_examples 'key with only dots' do
- it 'is invalid' do
- expect(entry).not_to be_valid
- end
+ describe '#value' do
+ it 'returns key value' do
+ expect(entry.value).to match(config)
+ end
+ end
- it 'reports errors with config value' do
- expect(entry.errors).to include 'key config cannot be "." or ".."'
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
end
- end
- context 'when entry config value is correct' do
- let(:config) { 'test' }
+ context 'when key is a symbol' do
+ let(:config) { :key }
- describe '#value' do
- it 'returns key value' do
- expect(entry.value).to eq 'test'
+ describe '#value' do
+ it 'returns key value' do
+ expect(entry.value).to eq(config.to_s)
+ end
end
- end
- describe '#valid?' do
- it 'is valid' do
- expect(entry).to be_valid
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
end
end
end
@@ -47,53 +47,11 @@ describe Gitlab::Ci::Config::Entry::Key do
describe '#errors' do
it 'saves errors' do
- expect(entry.errors)
- .to include 'key config should be a string or symbol'
+ expect(entry.errors.first)
+ .to match /should be a hash, a string or a symbol/
end
end
end
-
- context 'when entry value contains slash' do
- let(:config) { 'key/with/some/slashes' }
-
- it_behaves_like 'key with slash'
- end
-
- context 'when entry value contains URI encoded slash (%2F)' do
- let(:config) { 'key%2Fwith%2Fsome%2Fslashes' }
-
- it_behaves_like 'key with slash'
- end
-
- context 'when entry value is a dot' do
- let(:config) { '.' }
-
- it_behaves_like 'key with only dots'
- end
-
- context 'when entry value is two dots' do
- let(:config) { '..' }
-
- it_behaves_like 'key with only dots'
- end
-
- context 'when entry value is a URI encoded dot (%2E)' do
- let(:config) { '%2e' }
-
- it_behaves_like 'key with only dots'
- end
-
- context 'when entry value is two URI encoded dots (%2E)' do
- let(:config) { '%2E%2e' }
-
- it_behaves_like 'key with only dots'
- end
-
- context 'when entry value is one dot and one URI encoded dot' do
- let(:config) { '.%2e' }
-
- it_behaves_like 'key with only dots'
- end
end
describe '.default' do
diff --git a/spec/lib/gitlab/ci/config/entry/need_spec.rb b/spec/lib/gitlab/ci/config/entry/need_spec.rb
new file mode 100644
index 00000000000..d119e604900
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/need_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::Gitlab::Ci::Config::Entry::Need do
+ subject(:need) { described_class.new(config) }
+
+ context 'when job is specified' do
+ let(:config) { 'job_name' }
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns job needs configuration' do
+ expect(need.value).to eq(name: 'job_name')
+ end
+ end
+ end
+
+ context 'when need is empty' do
+ let(:config) { '' }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'is returns an error about an empty config' do
+ expect(need.errors)
+ .to contain_exactly("job config can't be blank")
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/needs_spec.rb b/spec/lib/gitlab/ci/config/entry/needs_spec.rb
new file mode 100644
index 00000000000..f4a76b52d30
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/needs_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::Gitlab::Ci::Config::Entry::Needs do
+ subject(:needs) { described_class.new(config) }
+
+ before do
+ needs.metadata[:allowed_needs] = %i[job]
+ end
+
+ describe 'validations' do
+ before do
+ needs.compose!
+ end
+
+ context 'when entry config value is correct' do
+ let(:config) { ['job_name'] }
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+ end
+
+ context 'when config value has wrong type' do
+ let(:config) { 123 }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about incorrect type' do
+ expect(needs.errors)
+ .to include('needs config can only be a hash or an array')
+ end
+ end
+ end
+
+ context 'when wrong needs type is used' do
+ let(:config) { [123] }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about incorrect type' do
+ expect(needs.errors).to contain_exactly(
+ 'need has an unsupported type')
+ end
+ end
+ end
+ end
+
+ describe '.compose!' do
+ context 'when valid job entries composed' do
+ let(:config) { %w[first_job_name second_job_name] }
+
+ before do
+ needs.compose!
+ end
+
+ describe '#value' do
+ it 'returns key value' do
+ expect(needs.value).to eq(
+ job: [
+ { name: 'first_job_name' },
+ { name: 'second_job_name' }
+ ]
+ )
+ end
+ end
+
+ describe '#descendants' do
+ it 'creates valid descendant nodes' do
+ expect(needs.descendants.count).to eq 2
+ expect(needs.descendants)
+ .to all(be_an_instance_of(::Gitlab::Ci::Config::Entry::Need))
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/prefix_spec.rb b/spec/lib/gitlab/ci/config/entry/prefix_spec.rb
new file mode 100644
index 00000000000..8132a674488
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/prefix_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Prefix do
+ let(:entry) { described_class.new(config) }
+
+ describe 'validations' do
+ it_behaves_like 'key entry validations', :prefix
+
+ context 'when entry value is not correct' do
+ let(:config) { ['incorrect'] }
+
+ describe '#errors' do
+ it 'saves errors' do
+ expect(entry.errors)
+ .to include 'prefix config should be a string or symbol'
+ end
+ end
+ end
+ end
+
+ describe '.default' do
+ it 'returns default key' do
+ expect(described_class.default).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 7e1a80414d4..43bd53b780f 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -12,10 +12,14 @@ describe Gitlab::Ci::Config::Entry::Root do
context 'when filtering all the entry/node names' do
it 'contains the expected node names' do
+ # No inheritable fields should be added to the `Root`
+ #
+ # Inheritable configuration can only be added to `default:`
+ #
+ # The purpose of `Root` is have only globally defined configuration.
expect(described_class.nodes.keys)
- .to match_array(%i[before_script image services
- after_script variables cache
- stages types include default])
+ .to match_array(%i[before_script image services after_script
+ variables cache stages types include default workflow])
end
end
end
@@ -45,7 +49,7 @@ describe Gitlab::Ci::Config::Entry::Root do
end
it 'creates node object for each entry' do
- expect(root.descendants.count).to eq 10
+ expect(root.descendants.count).to eq 11
end
it 'creates node object using valid class' do
@@ -198,7 +202,7 @@ describe Gitlab::Ci::Config::Entry::Root do
describe '#nodes' do
it 'instantizes all nodes' do
- expect(root.descendants.count).to eq 10
+ expect(root.descendants.count).to eq 11
end
it 'contains unspecified nodes' do
@@ -293,7 +297,7 @@ describe Gitlab::Ci::Config::Entry::Root do
describe '#errors' do
it 'reports errors from child nodes' do
expect(root.errors)
- .to include 'before_script config should be an array of strings'
+ .to include 'before_script config should be an array containing strings and arrays of strings'
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index 9d4f7153cd0..216f5d0c77d 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -1,10 +1,22 @@
+# frozen_string_literal: true
+
require 'fast_spec_helper'
require 'gitlab_chronic_duration'
require 'support/helpers/stub_feature_flags'
require_dependency 'active_model'
describe Gitlab::Ci::Config::Entry::Rules::Rule do
- let(:entry) { described_class.new(config) }
+ let(:factory) do
+ Gitlab::Config::Entry::Factory.new(described_class)
+ .metadata(metadata)
+ .value(config)
+ end
+
+ let(:metadata) do
+ { allowed_when: %w[on_success on_failure always never manual delayed] }
+ end
+
+ let(:entry) { factory.create! }
describe '.new' do
subject { entry }
@@ -210,6 +222,112 @@ describe Gitlab::Ci::Config::Entry::Rules::Rule do
.to include(/should be a hash/)
end
end
+
+ context 'when: validation' do
+ context 'with an invalid boolean when:' do
+ let(:config) do
+ { if: '$THIS == "that"', when: false }
+ end
+
+ it { is_expected.to be_a(described_class) }
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about invalid when:' do
+ expect(subject.errors).to include(/when unknown value: false/)
+ end
+
+ context 'when composed' do
+ before do
+ subject.compose!
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about invalid when:' do
+ expect(subject.errors).to include(/when unknown value: false/)
+ end
+ end
+ end
+
+ context 'with an invalid string when:' do
+ let(:config) do
+ { if: '$THIS == "that"', when: 'explode' }
+ end
+
+ it { is_expected.to be_a(described_class) }
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about invalid when:' do
+ expect(subject.errors).to include(/when unknown value: explode/)
+ end
+
+ context 'when composed' do
+ before do
+ subject.compose!
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about invalid when:' do
+ expect(subject.errors).to include(/when unknown value: explode/)
+ end
+ end
+ end
+
+ context 'with a string passed in metadata but not allowed in the class' do
+ let(:metadata) { { allowed_when: %w[explode] } }
+
+ let(:config) do
+ { if: '$THIS == "that"', when: 'explode' }
+ end
+
+ it { is_expected.to be_a(described_class) }
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about invalid when:' do
+ expect(subject.errors).to include(/when unknown value: explode/)
+ end
+
+ context 'when composed' do
+ before do
+ subject.compose!
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about invalid when:' do
+ expect(subject.errors).to include(/when unknown value: explode/)
+ end
+ end
+ end
+
+ context 'with a string allowed in the class but not passed in metadata' do
+ let(:metadata) { { allowed_when: %w[always never] } }
+
+ let(:config) do
+ { if: '$THIS == "that"', when: 'on_success' }
+ end
+
+ it { is_expected.to be_a(described_class) }
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about invalid when:' do
+ expect(subject.errors).to include(/when unknown value: on_success/)
+ end
+
+ context 'when composed' do
+ before do
+ subject.compose!
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about invalid when:' do
+ expect(subject.errors).to include(/when unknown value: on_success/)
+ end
+ end
+ end
+ end
end
describe '#value' do
diff --git a/spec/lib/gitlab/ci/config/entry/rules_spec.rb b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
index 291e7373daf..3c050801023 100644
--- a/spec/lib/gitlab/ci/config/entry/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
@@ -1,9 +1,18 @@
+# frozen_string_literal: true
+
require 'fast_spec_helper'
require 'support/helpers/stub_feature_flags'
require_dependency 'active_model'
describe Gitlab::Ci::Config::Entry::Rules do
- let(:entry) { described_class.new(config) }
+ let(:factory) do
+ Gitlab::Config::Entry::Factory.new(described_class)
+ .metadata(metadata)
+ .value(config)
+ end
+
+ let(:metadata) { { allowed_when: %w[always never] } }
+ let(:entry) { factory.create! }
describe '.new' do
subject { entry }
@@ -16,7 +25,7 @@ describe Gitlab::Ci::Config::Entry::Rules do
it { is_expected.to be_a(described_class) }
it { is_expected.to be_valid }
- context 'after #compose!' do
+ context 'when composed' do
before do
subject.compose!
end
@@ -36,7 +45,7 @@ describe Gitlab::Ci::Config::Entry::Rules do
it { is_expected.to be_a(described_class) }
it { is_expected.to be_valid }
- context 'after #compose!' do
+ context 'when composed' do
before do
subject.compose!
end
@@ -52,48 +61,6 @@ describe Gitlab::Ci::Config::Entry::Rules do
it { is_expected.not_to be_valid }
end
-
- context 'with an invalid boolean when:' do
- let(:config) do
- [{ if: '$THIS == "that"', when: false }]
- end
-
- it { is_expected.to be_a(described_class) }
- it { is_expected.to be_valid }
-
- context 'after #compose!' do
- before do
- subject.compose!
- end
-
- it { is_expected.not_to be_valid }
-
- it 'returns an error about invalid when:' do
- expect(subject.errors).to include(/when unknown value: false/)
- end
- end
- end
-
- context 'with an invalid string when:' do
- let(:config) do
- [{ if: '$THIS == "that"', when: 'explode' }]
- end
-
- it { is_expected.to be_a(described_class) }
- it { is_expected.to be_valid }
-
- context 'after #compose!' do
- before do
- subject.compose!
- end
-
- it { is_expected.not_to be_valid }
-
- it 'returns an error about invalid when:' do
- expect(subject.errors).to include(/when unknown value: explode/)
- end
- end
- end
end
describe '#value' do
diff --git a/spec/lib/gitlab/ci/config/entry/script_spec.rb b/spec/lib/gitlab/ci/config/entry/script_spec.rb
index d523243d3b6..57dc20ea628 100644
--- a/spec/lib/gitlab/ci/config/entry/script_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/script_spec.rb
@@ -6,7 +6,7 @@ describe Gitlab::Ci::Config::Entry::Script do
let(:entry) { described_class.new(config) }
describe 'validations' do
- context 'when entry config value is correct' do
+ context 'when entry config value is array of strings' do
let(:config) { %w(ls pwd) }
describe '#value' do
@@ -28,13 +28,74 @@ describe Gitlab::Ci::Config::Entry::Script do
end
end
- context 'when entry value is not correct' do
+ context 'when entry config value is array of arrays of strings' do
+ let(:config) { [['ls'], ['pwd', 'echo 1']] }
+
+ describe '#value' do
+ it 'returns array of strings' do
+ expect(entry.value).to eq ['ls', 'pwd', 'echo 1']
+ end
+ end
+
+ describe '#errors' do
+ it 'does not append errors' do
+ expect(entry.errors).to be_empty
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry config value is array containing strings and arrays of strings' do
+ let(:config) { ['ls', ['pwd', 'echo 1']] }
+
+ describe '#value' do
+ it 'returns array of strings' do
+ expect(entry.value).to eq ['ls', 'pwd', 'echo 1']
+ end
+ end
+
+ describe '#errors' do
+ it 'does not append errors' do
+ expect(entry.errors).to be_empty
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry value is string' do
let(:config) { 'ls' }
describe '#errors' do
it 'saves errors' do
expect(entry.errors)
- .to include 'script config should be an array of strings'
+ .to include 'script config should be an array containing strings and arrays of strings'
+ end
+ end
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ end
+ end
+ end
+
+ context 'when entry value is multi-level nested array' do
+ let(:config) { [['ls', ['echo 1']], 'pwd'] }
+
+ describe '#errors' do
+ it 'saves errors' do
+ expect(entry.errors)
+ .to include 'script config should be an array containing strings and arrays of strings'
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
new file mode 100644
index 00000000000..f2832b94bf0
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Workflow do
+ let(:factory) { Gitlab::Config::Entry::Factory.new(described_class).value(rules_hash) }
+ let(:config) { factory.create! }
+
+ describe 'validations' do
+ context 'when work config value is a string' do
+ let(:rules_hash) { 'build' }
+
+ describe '#valid?' do
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
+
+ it 'attaches an error specifying that workflow should point to a hash' do
+ expect(config.errors).to include('workflow config should be a hash')
+ end
+ end
+
+ describe '#value' do
+ it 'returns the invalid configuration' do
+ expect(config.value).to eq(rules_hash)
+ end
+ end
+ end
+
+ context 'when work config value is a hash' do
+ let(:rules_hash) { { rules: [{ if: '$VAR' }] } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+
+ it 'attaches no errors' do
+ expect(config.errors).to be_empty
+ end
+ end
+
+ describe '#value' do
+ it 'returns the config' do
+ expect(config.value).to eq(rules_hash)
+ end
+ end
+
+ context 'with an invalid key' do
+ let(:rules_hash) { { trash: [{ if: '$VAR' }] } }
+
+ describe '#valid?' do
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
+
+ it 'attaches an error specifying the unknown key' do
+ expect(config.errors).to include('workflow config contains unknown keys: trash')
+ end
+ end
+
+ describe '#value' do
+ it 'returns the invalid configuration' do
+ expect(config.value).to eq(rules_hash)
+ end
+ end
+ end
+ end
+ end
+
+ describe '.default' do
+ it 'is nil' do
+ expect(described_class.default).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/normalizer_spec.rb b/spec/lib/gitlab/ci/config/normalizer_spec.rb
index 6b766cc37bf..bf880478387 100644
--- a/spec/lib/gitlab/ci/config/normalizer_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer_spec.rb
@@ -7,6 +7,16 @@ describe Gitlab::Ci::Config::Normalizer do
let(:job_config) { { script: 'rspec', parallel: 5, name: 'rspec' } }
let(:config) { { job_name => job_config } }
+ let(:expanded_job_names) do
+ [
+ "rspec 1/5",
+ "rspec 2/5",
+ "rspec 3/5",
+ "rspec 4/5",
+ "rspec 5/5"
+ ]
+ end
+
describe '.normalize_jobs' do
subject { described_class.new(config).normalize_jobs }
@@ -15,9 +25,7 @@ describe Gitlab::Ci::Config::Normalizer do
end
it 'has parallelized jobs' do
- job_names = [:"rspec 1/5", :"rspec 2/5", :"rspec 3/5", :"rspec 4/5", :"rspec 5/5"]
-
- is_expected.to include(*job_names)
+ is_expected.to include(*expanded_job_names.map(&:to_sym))
end
it 'sets job instance in options' do
@@ -43,49 +51,109 @@ describe Gitlab::Ci::Config::Normalizer do
let(:job_name) { :"rspec 35/2" }
it 'properly parallelizes job names' do
- job_names = [:"rspec 35/2 1/5", :"rspec 35/2 2/5", :"rspec 35/2 3/5", :"rspec 35/2 4/5", :"rspec 35/2 5/5"]
+ job_names = [
+ :"rspec 35/2 1/5",
+ :"rspec 35/2 2/5",
+ :"rspec 35/2 3/5",
+ :"rspec 35/2 4/5",
+ :"rspec 35/2 5/5"
+ ]
is_expected.to include(*job_names)
end
end
- %i[dependencies needs].each do |context|
- context "when job has #{context} on parallelized jobs" do
+ context 'for dependencies' do
+ context "when job has dependencies on parallelized jobs" do
let(:config) do
{
job_name => job_config,
- other_job: { script: 'echo 1', context => [job_name.to_s] }
+ other_job: { script: 'echo 1', dependencies: [job_name.to_s] }
}
end
- it "parallelizes #{context}" do
- job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
-
- expect(subject[:other_job][context]).to include(*job_names)
+ it "parallelizes dependencies" do
+ expect(subject[:other_job][:dependencies]).to eq(expanded_job_names)
end
it "does not include original job name in #{context}" do
- expect(subject[:other_job][context]).not_to include(job_name)
+ expect(subject[:other_job][:dependencies]).not_to include(job_name)
end
end
- context "when there are #{context} which are both parallelized and not" do
+ context "when there are dependencies which are both parallelized and not" do
let(:config) do
{
job_name => job_config,
other_job: { script: 'echo 1' },
- final_job: { script: 'echo 1', context => [job_name.to_s, "other_job"] }
+ final_job: { script: 'echo 1', dependencies: [job_name.to_s, "other_job"] }
}
end
- it "parallelizes #{context}" do
+ it "parallelizes dependencies" do
job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
- expect(subject[:final_job][context]).to include(*job_names)
+ expect(subject[:final_job][:dependencies]).to include(*job_names)
+ end
+
+ it "includes the regular job in dependencies" do
+ expect(subject[:final_job][:dependencies]).to include('other_job')
+ end
+ end
+ end
+
+ context 'for needs' do
+ let(:expanded_job_attributes) do
+ expanded_job_names.map do |job_name|
+ { name: job_name }
+ end
+ end
+
+ context "when job has needs on parallelized jobs" do
+ let(:config) do
+ {
+ job_name => job_config,
+ other_job: {
+ script: 'echo 1',
+ needs: {
+ job: [
+ { name: job_name.to_s }
+ ]
+ }
+ }
+ }
+ end
+
+ it "parallelizes needs" do
+ expect(subject.dig(:other_job, :needs, :job)).to eq(expanded_job_attributes)
+ end
+ end
+
+ context "when there are dependencies which are both parallelized and not" do
+ let(:config) do
+ {
+ job_name => job_config,
+ other_job: {
+ script: 'echo 1'
+ },
+ final_job: {
+ script: 'echo 1',
+ needs: {
+ job: [
+ { name: job_name.to_s },
+ { name: "other_job" }
+ ]
+ }
+ }
+ }
+ end
+
+ it "parallelizes dependencies" do
+ expect(subject.dig(:final_job, :needs, :job)).to include(*expanded_job_attributes)
end
- it "includes the regular job in #{context}" do
- expect(subject[:final_job][context]).to include('other_job')
+ it "includes the regular job in dependencies" do
+ expect(subject.dig(:final_job, :needs, :job)).to include(name: 'other_job')
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
index ba4f841cf43..a631cd2777b 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
@@ -11,6 +11,7 @@ describe Gitlab::Ci::Pipeline::Chain::Build do
[{ key: 'first', secret_value: 'world' },
{ key: 'second', secret_value: 'second_world' }]
end
+
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
source: :push,
@@ -51,12 +52,6 @@ describe Gitlab::Ci::Pipeline::Chain::Build do
.to eq variables_attributes.map(&:with_indifferent_access)
end
- it 'sets a valid config source' do
- step.perform!
-
- expect(pipeline.repository_source?).to be true
- end
-
it 'returns a valid pipeline' do
step.perform!
diff --git a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
new file mode 100644
index 00000000000..7b76adaf683
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:pipeline) { build(:ci_pipeline, project: project) }
+
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ describe '#perform!' do
+ context 'when pipeline has been skipped by workflow configuration' do
+ before do
+ allow(step).to receive(:workflow_passed?)
+ .and_return(false)
+
+ step.perform!
+ end
+
+ it 'does not save the pipeline' do
+ expect(pipeline).not_to be_persisted
+ end
+
+ it 'breaks the chain' do
+ expect(step.break?).to be true
+ end
+
+ it 'attaches an error to the pipeline' do
+ expect(pipeline.errors[:base]).to include('Pipeline filtered out by workflow rules.')
+ end
+ end
+
+ context 'when pipeline has not been skipped by workflow configuration' do
+ before do
+ allow(step).to receive(:workflow_passed?)
+ .and_return(true)
+
+ step.perform!
+ end
+
+ it 'continues the pipeline processing chain' do
+ expect(step.break?).to be false
+ end
+
+ it 'does not skip the pipeline' do
+ expect(pipeline).not_to be_persisted
+ expect(pipeline).not_to be_skipped
+ end
+
+ it 'attaches no errors' do
+ expect(pipeline.errors).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 9bccd5be4fe..52e9432dc92 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -7,9 +7,7 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
set(:user) { create(:user) }
let(:pipeline) do
- build(:ci_pipeline_with_one_job, project: project,
- ref: 'master',
- user: user)
+ build(:ci_pipeline, project: project, ref: 'master', user: user)
end
let(:command) do
@@ -20,11 +18,32 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
seeds_block: nil)
end
+ let(:dependencies) do
+ [
+ Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::Config::Process.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::Seed.new(pipeline, command)
+ ]
+ end
+
let(:step) { described_class.new(pipeline, command) }
+ let(:config) do
+ { rspec: { script: 'rspec' } }
+ end
+
+ def run_chain
+ dependencies.map(&:perform!)
+ step.perform!
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
context 'when pipeline doesn not have seeds block' do
before do
- step.perform!
+ run_chain
end
it 'does not persist the pipeline' do
@@ -59,12 +78,8 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
} }
end
- let(:pipeline) do
- build(:ci_pipeline, project: project, config: config)
- end
-
before do
- step.perform!
+ run_chain
end
it 'breaks the chain' do
@@ -82,16 +97,16 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
end
describe 'pipeline protect' do
- subject { step.perform! }
-
context 'when ref is protected' do
before do
allow(project).to receive(:protected_for?).with('master').and_return(true)
allow(project).to receive(:protected_for?).with('refs/heads/master').and_return(true)
+
+ dependencies.map(&:perform!)
end
it 'does not protect the pipeline' do
- subject
+ run_chain
expect(pipeline.protected).to eq(true)
end
@@ -99,7 +114,7 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
context 'when ref is not protected' do
it 'does not protect the pipeline' do
- subject
+ run_chain
expect(pipeline.protected).to eq(false)
end
@@ -112,7 +127,7 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
end
before do
- step.perform!
+ run_chain
end
it 'breaks the chain' do
@@ -144,7 +159,7 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
end
it 'populates pipeline with resources described in the seeds block' do
- step.perform!
+ run_chain
expect(pipeline).not_to be_persisted
expect(pipeline.variables).not_to be_empty
@@ -154,7 +169,7 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
end
it 'has pipeline iid' do
- step.perform!
+ run_chain
expect(pipeline.iid).to be > 0
end
@@ -166,7 +181,7 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
end
it 'wastes pipeline iid' do
- expect { step.perform! }.to raise_error(ActiveRecord::RecordNotSaved)
+ expect { run_chain }.to raise_error(ActiveRecord::RecordNotSaved)
last_iid = InternalId.ci_pipelines
.where(project_id: project.id)
@@ -181,14 +196,14 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
let(:pipeline) { create(:ci_pipeline, project: project) }
it 'raises error' do
- expect { step.perform! }.to raise_error(described_class::PopulateError)
+ expect { run_chain }.to raise_error(described_class::PopulateError)
end
end
context 'when variables policy is specified' do
shared_examples_for 'a correct pipeline' do
it 'populates pipeline according to used policies' do
- step.perform!
+ run_chain
expect(pipeline.stages.size).to eq 1
expect(pipeline.stages.first.statuses.size).to eq 1
@@ -202,10 +217,6 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
prod: { script: 'cap prod', stage: 'deploy', only: ['tags'] } }
end
- let(:pipeline) do
- build(:ci_pipeline, ref: 'master', project: project, config: config)
- end
-
it_behaves_like 'a correct pipeline'
context 'when variables expression is specified' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb
index 7c1c016b4bb..92eadf5548c 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb
@@ -2,32 +2,38 @@
require 'spec_helper'
-describe Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs do
- let(:project) { create(:project, :repository) }
+describe ::Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs do
+ let(:project) { create(:project) }
let(:pipeline) do
- build(:ci_pipeline_with_one_job, project: project, ref: 'master')
+ build(:ci_pipeline, project: project)
end
let(:command) do
- double(:command, project: project, chat_data: { command: 'echo' })
+ double(:command,
+ config_processor: double(:processor,
+ jobs: { echo: double(:job_echo), rspec: double(:job_rspec) }),
+ project: project,
+ chat_data: { command: 'echo' })
end
describe '#perform!' do
- it 'removes unwanted jobs for chat pipelines' do
- allow(pipeline).to receive(:chat?).and_return(true)
+ subject { described_class.new(pipeline, command).perform! }
- pipeline.config_processor.jobs[:echo] = double(:job)
+ it 'removes unwanted jobs for chat pipelines' do
+ expect(pipeline).to receive(:chat?).and_return(true)
- described_class.new(pipeline, command).perform!
+ subject
- expect(pipeline.config_processor.jobs.keys).to eq([:echo])
+ expect(command.config_processor.jobs.keys).to eq([:echo])
end
- end
- it 'does not remove any jobs for non-chat pipelines' do
- described_class.new(pipeline, command).perform!
+ it 'does not remove any jobs for non chat-pipelines' do
+ expect(pipeline).to receive(:chat?).and_return(false)
- expect(pipeline.config_processor.jobs.keys).to eq([:rspec])
+ subject
+
+ expect(command.config_processor.jobs.keys).to eq([:echo, :rspec])
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
new file mode 100644
index 00000000000..aa54f19b26c
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Chain::Seed do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user, developer_projects: [project]) }
+
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(
+ project: project,
+ current_user: user,
+ origin_ref: 'master',
+ seeds_block: nil)
+ end
+
+ def run_chain(pipeline, command)
+ [
+ Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::Config::Process.new(pipeline, command)
+ ].map(&:perform!)
+
+ described_class.new(pipeline, command).perform!
+ end
+
+ let(:pipeline) { build(:ci_pipeline, project: project) }
+
+ describe '#perform!' do
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ run_chain(pipeline, command)
+ end
+
+ let(:config) do
+ { rspec: { script: 'rake' } }
+ end
+
+ it 'allocates next IID' do
+ expect(pipeline.iid).to be_present
+ end
+
+ it 'sets the seeds in the command object' do
+ expect(command.stage_seeds).to all(be_a Gitlab::Ci::Pipeline::Seed::Base)
+ expect(command.stage_seeds.count).to eq 1
+ end
+
+ context 'when no ref policy is specified' do
+ let(:config) do
+ {
+ production: { stage: 'deploy', script: 'cap prod' },
+ rspec: { stage: 'test', script: 'rspec' },
+ spinach: { stage: 'test', script: 'spinach' }
+ }
+ end
+
+ it 'correctly fabricates a stage seeds object' do
+ seeds = command.stage_seeds
+ expect(seeds.size).to eq 2
+ expect(seeds.first.attributes[:name]).to eq 'test'
+ expect(seeds.second.attributes[:name]).to eq 'deploy'
+ expect(seeds.dig(0, 0, :name)).to eq 'rspec'
+ expect(seeds.dig(0, 1, :name)).to eq 'spinach'
+ expect(seeds.dig(1, 0, :name)).to eq 'production'
+ end
+ end
+
+ context 'when refs policy is specified' do
+ let(:pipeline) do
+ build(:ci_pipeline, project: project, ref: 'feature', tag: true)
+ end
+
+ let(:config) do
+ {
+ production: { stage: 'deploy', script: 'cap prod', only: ['master'] },
+ spinach: { stage: 'test', script: 'spinach', only: ['tags'] }
+ }
+ end
+
+ it 'returns stage seeds only assigned to master' do
+ seeds = command.stage_seeds
+
+ expect(seeds.size).to eq 1
+ expect(seeds.first.attributes[:name]).to eq 'test'
+ expect(seeds.dig(0, 0, :name)).to eq 'spinach'
+ end
+ end
+
+ context 'when source policy is specified' do
+ let(:pipeline) { create(:ci_pipeline, source: :schedule) }
+
+ let(:config) do
+ {
+ production: { stage: 'deploy', script: 'cap prod', only: ['triggers'] },
+ spinach: { stage: 'test', script: 'spinach', only: ['schedules'] }
+ }
+ end
+
+ it 'returns stage seeds only assigned to schedules' do
+ seeds = command.stage_seeds
+
+ expect(seeds.size).to eq 1
+ expect(seeds.first.attributes[:name]).to eq 'test'
+ expect(seeds.dig(0, 0, :name)).to eq 'spinach'
+ end
+ end
+
+ context 'when kubernetes policy is specified' do
+ let(:config) do
+ {
+ spinach: { stage: 'test', script: 'spinach' },
+ production: {
+ stage: 'deploy',
+ script: 'cap',
+ only: { kubernetes: 'active' }
+ }
+ }
+ end
+
+ context 'when kubernetes is active' do
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+ let(:pipeline) { build(:ci_pipeline, project: project) }
+
+ it 'returns seeds for kubernetes dependent job' do
+ seeds = command.stage_seeds
+
+ expect(seeds.size).to eq 2
+ expect(seeds.dig(0, 0, :name)).to eq 'spinach'
+ expect(seeds.dig(1, 0, :name)).to eq 'production'
+ end
+ end
+ end
+
+ context 'when kubernetes is not active' do
+ it 'does not return seeds for kubernetes dependent job' do
+ seeds = command.stage_seeds
+
+ expect(seeds.size).to eq 1
+ expect(seeds.dig(0, 0, :name)).to eq 'spinach'
+ end
+ end
+ end
+
+ context 'when variables policy is specified' do
+ let(:config) do
+ {
+ unit: { script: 'minitest', only: { variables: ['$CI_PIPELINE_SOURCE'] } },
+ feature: { script: 'spinach', only: { variables: ['$UNDEFINED'] } }
+ }
+ end
+
+ it 'returns stage seeds only when variables expression is truthy' do
+ seeds = command.stage_seeds
+
+ expect(seeds.size).to eq 1
+ expect(seeds.dig(0, 0, :name)).to eq 'unit'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
deleted file mode 100644
index 79acd3e4f54..00000000000
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
+++ /dev/null
@@ -1,148 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::Ci::Pipeline::Chain::Validate::Config do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
-
- let(:command) do
- Gitlab::Ci::Pipeline::Chain::Command.new(
- project: project,
- current_user: user,
- save_incompleted: true)
- end
-
- let!(:step) { described_class.new(pipeline, command) }
-
- before do
- step.perform!
- end
-
- context 'when pipeline has no YAML configuration' do
- let(:pipeline) do
- build_stubbed(:ci_pipeline, project: project)
- end
-
- it 'appends errors about missing configuration' do
- expect(pipeline.errors.to_a)
- .to include 'Missing .gitlab-ci.yml file'
- end
-
- it 'breaks the chain' do
- expect(step.break?).to be true
- end
- end
-
- context 'when YAML configuration contains errors' do
- let(:pipeline) do
- build(:ci_pipeline, project: project, config: 'invalid YAML')
- end
-
- it 'appends errors about YAML errors' do
- expect(pipeline.errors.to_a)
- .to include 'Invalid configuration format'
- end
-
- it 'breaks the chain' do
- expect(step.break?).to be true
- end
-
- context 'when saving incomplete pipeline is allowed' do
- let(:command) do
- double('command', project: project,
- current_user: user,
- save_incompleted: true)
- end
-
- it 'fails the pipeline' do
- expect(pipeline.reload).to be_failed
- end
-
- it 'sets a config error failure reason' do
- expect(pipeline.reload.config_error?).to eq true
- end
- end
-
- context 'when saving incomplete pipeline is not allowed' do
- let(:command) do
- double('command', project: project,
- current_user: user,
- save_incompleted: false)
- end
-
- it 'does not drop pipeline' do
- expect(pipeline).not_to be_failed
- expect(pipeline).not_to be_persisted
- end
- end
- end
-
- context 'when pipeline contains configuration validation errors' do
- let(:config) do
- {
- rspec: {
- before_script: 10,
- script: 'ls -al'
- }
- }
- end
-
- let(:pipeline) do
- build(:ci_pipeline, project: project, config: config)
- end
-
- it 'appends configuration validation errors to pipeline errors' do
- expect(pipeline.errors.to_a)
- .to include "jobs:rspec:before_script config should be an array of strings"
- end
-
- it 'breaks the chain' do
- expect(step.break?).to be true
- end
- end
-
- context 'when pipeline is correct and complete' do
- let(:pipeline) do
- build(:ci_pipeline_with_one_job, project: project)
- end
-
- it 'does not invalidate the pipeline' do
- expect(pipeline).to be_valid
- end
-
- it 'does not break the chain' do
- expect(step.break?).to be false
- end
- end
-
- context 'when pipeline source is merge request' do
- before do
- stub_ci_pipeline_yaml_file(YAML.dump(config))
- end
-
- let(:pipeline) { build_stubbed(:ci_pipeline, project: project) }
-
- let(:merge_request_pipeline) do
- build(:ci_pipeline, source: :merge_request_event, project: project)
- end
-
- let(:chain) { described_class.new(merge_request_pipeline, command).tap(&:perform!) }
-
- context "when config contains 'merge_requests' keyword" do
- let(:config) { { rspec: { script: 'echo', only: ['merge_requests'] } } }
-
- it 'does not break the chain' do
- expect(chain).not_to be_break
- end
- end
-
- context "when config contains 'merge_request' keyword" do
- let(:config) { { rspec: { script: 'echo', only: ['merge_request'] } } }
-
- it 'does not break the chain' do
- expect(chain).not_to be_break
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
new file mode 100644
index 00000000000..6a8b804597c
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
@@ -0,0 +1,261 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:head_sha) { project.repository.head_commit.id }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: head_sha) }
+
+ let(:processor) { described_class.new(pipeline, config) }
+
+ describe '#build_attributes' do
+ subject { processor.build_attributes }
+
+ context 'with cache:key' do
+ let(:config) do
+ {
+ key: 'a-key',
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it { is_expected.to include(options: { cache: config }) }
+ end
+
+ context 'with cache:key as a symbol' do
+ let(:config) do
+ {
+ key: :a_key,
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it { is_expected.to include(options: { cache: config.merge(key: "a_key") }) }
+ end
+
+ context 'with cache:key:files' do
+ shared_examples 'default key' do
+ let(:config) do
+ { key: { files: files } }
+ end
+
+ it 'uses default key' do
+ expected = { options: { cache: { key: 'default' } } }
+
+ is_expected.to include(expected)
+ end
+ end
+
+ shared_examples 'version and gemfile files' do
+ let(:config) do
+ {
+ key: {
+ files: files
+ },
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it 'builds a string key' do
+ expected = {
+ options: {
+ cache: {
+ key: '703ecc8fef1635427a1f86a8a1a308831c122392',
+ paths: ['vendor/ruby']
+ }
+ }
+ }
+
+ is_expected.to include(expected)
+ end
+ end
+
+ context 'with existing files' do
+ let(:files) { ['VERSION', 'Gemfile.zip'] }
+
+ it_behaves_like 'version and gemfile files'
+ end
+
+ context 'with files starting with ./' do
+ let(:files) { ['Gemfile.zip', './VERSION'] }
+
+ it_behaves_like 'version and gemfile files'
+ end
+
+ context 'with feature flag disabled' do
+ let(:files) { ['VERSION', 'Gemfile.zip'] }
+
+ before do
+ stub_feature_flags(ci_file_based_cache: false)
+ end
+
+ it_behaves_like 'default key'
+ end
+
+ context 'with files ending with /' do
+ let(:files) { ['Gemfile.zip/'] }
+
+ it_behaves_like 'default key'
+ end
+
+ context 'with new line in filenames' do
+ let(:files) { ["Gemfile.zip\nVERSION"] }
+
+ it_behaves_like 'default key'
+ end
+
+ context 'with missing files' do
+ let(:files) { ['project-gemfile.lock', ''] }
+
+ it_behaves_like 'default key'
+ end
+
+ context 'with directories' do
+ shared_examples 'foo/bar directory key' do
+ let(:config) do
+ {
+ key: {
+ files: files
+ }
+ }
+ end
+
+ it 'builds a string key' do
+ expected = {
+ options: {
+ cache: { key: '74bf43fb1090f161bdd4e265802775dbda2f03d1' }
+ }
+ }
+
+ is_expected.to include(expected)
+ end
+ end
+
+ context 'with directory' do
+ let(:files) { ['foo/bar'] }
+
+ it_behaves_like 'foo/bar directory key'
+ end
+
+ context 'with directory ending in slash' do
+ let(:files) { ['foo/bar/'] }
+
+ it_behaves_like 'foo/bar directory key'
+ end
+
+ context 'with directories ending in slash star' do
+ let(:files) { ['foo/bar/*'] }
+
+ it_behaves_like 'foo/bar directory key'
+ end
+ end
+ end
+
+ context 'with cache:key:prefix' do
+ context 'without files' do
+ let(:config) do
+ {
+ key: {
+ prefix: 'a-prefix'
+ },
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it 'adds prefix to default key' do
+ expected = {
+ options: {
+ cache: {
+ key: 'a-prefix-default',
+ paths: ['vendor/ruby']
+ }
+ }
+ }
+
+ is_expected.to include(expected)
+ end
+ end
+
+ context 'with existing files' do
+ let(:config) do
+ {
+ key: {
+ files: ['VERSION', 'Gemfile.zip'],
+ prefix: 'a-prefix'
+ },
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it 'adds prefix key' do
+ expected = {
+ options: {
+ cache: {
+ key: 'a-prefix-703ecc8fef1635427a1f86a8a1a308831c122392',
+ paths: ['vendor/ruby']
+ }
+ }
+ }
+
+ is_expected.to include(expected)
+ end
+ end
+
+ context 'with missing files' do
+ let(:config) do
+ {
+ key: {
+ files: ['project-gemfile.lock', ''],
+ prefix: 'a-prefix'
+ },
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it 'adds prefix to default key' do
+ expected = {
+ options: {
+ cache: {
+ key: 'a-prefix-default',
+ paths: ['vendor/ruby']
+ }
+ }
+ }
+
+ is_expected.to include(expected)
+ end
+ end
+ end
+
+ context 'with all cache option keys' do
+ let(:config) do
+ {
+ key: 'a-key',
+ paths: ['vendor/ruby'],
+ untracked: true,
+ policy: 'push'
+ }
+ end
+
+ it { is_expected.to include(options: { cache: config }) }
+ end
+
+ context 'with unknown cache option keys' do
+ let(:config) do
+ {
+ key: 'a-key',
+ unknown_key: true
+ }
+ end
+
+ it { expect { subject }.to raise_error(ArgumentError, /unknown_key/) }
+ end
+
+ context 'with empty config' do
+ let(:config) { {} }
+
+ it { is_expected.to include(options: {}) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 945baf47b7b..53dcb6359fe 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
describe Gitlab::Ci::Pipeline::Seed::Build do
let(:project) { create(:project, :repository) }
- let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+ let(:head_sha) { project.repository.head_commit.id }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: head_sha) }
let(:attributes) { { name: 'rspec', ref: 'master' } }
let(:previous_stages) { [] }
@@ -69,6 +70,101 @@ describe Gitlab::Ci::Pipeline::Seed::Build do
it { is_expected.to include(when: 'never') }
end
end
+
+ context 'with cache:key' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: {
+ key: 'a-value'
+ }
+ }
+ end
+
+ it { is_expected.to include(options: { cache: { key: 'a-value' } }) }
+ end
+
+ context 'with cache:key:files' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: {
+ key: {
+ files: ['VERSION']
+ }
+ }
+ }
+ end
+
+ it 'includes cache options' do
+ cache_options = {
+ options: {
+ cache: {
+ key: 'f155568ad0933d8358f66b846133614f76dd0ca4'
+ }
+ }
+ }
+
+ is_expected.to include(cache_options)
+ end
+ end
+
+ context 'with cache:key:prefix' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: {
+ key: {
+ prefix: 'something'
+ }
+ }
+ }
+ end
+
+ it { is_expected.to include(options: { cache: { key: 'something-default' } }) }
+ end
+
+ context 'with cache:key:files and prefix' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: {
+ key: {
+ files: ['VERSION'],
+ prefix: 'something'
+ }
+ }
+ }
+ end
+
+ it 'includes cache options' do
+ cache_options = {
+ options: {
+ cache: {
+ key: 'something-f155568ad0933d8358f66b846133614f76dd0ca4'
+ }
+ }
+ }
+
+ is_expected.to include(cache_options)
+ end
+ end
+
+ context 'with empty cache' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: {}
+ }
+ end
+
+ it { is_expected.to include(options: {}) }
+ end
end
describe '#bridge?' do
@@ -773,10 +869,4 @@ describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
end
-
- describe '#scoped_variables_hash' do
- subject { seed_build.scoped_variables_hash }
-
- it { is_expected.to eq(seed_build.to_resource.scoped_variables_hash) }
- end
end
diff --git a/spec/lib/gitlab/ci/status/composite_spec.rb b/spec/lib/gitlab/ci/status/composite_spec.rb
index 1725d954b92..857483a9e0a 100644
--- a/spec/lib/gitlab/ci/status/composite_spec.rb
+++ b/spec/lib/gitlab/ci/status/composite_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Ci::Status::Composite do
diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb
index 1baea13299b..45b59541ce6 100644
--- a/spec/lib/gitlab/ci/trace/stream_spec.rb
+++ b/spec/lib/gitlab/ci/trace/stream_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
@@ -100,7 +102,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
describe '#append' do
shared_examples_for 'appends' do
it "truncates and append content" do
- stream.append("89", 4)
+ stream.append(+"89", 4)
stream.seek(0)
expect(stream.size).to eq(6)
@@ -108,7 +110,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
end
it 'appends in binary mode' do
- '😺'.force_encoding('ASCII-8BIT').each_char.with_index do |byte, offset|
+ (+'😺').force_encoding('ASCII-8BIT').each_char.with_index do |byte, offset|
stream.append(byte, offset)
end
@@ -154,7 +156,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
describe '#set' do
shared_examples_for 'sets' do
before do
- stream.set("8901")
+ stream.set(+"8901")
end
it "overwrite content" do
@@ -168,7 +170,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is StringIO' do
let(:stream) do
described_class.new do
- StringIO.new("12345678")
+ StringIO.new(+"12345678")
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index cb5ebde16d7..4b1c7483b11 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -108,6 +108,25 @@ module Gitlab
it { expect(subject[:interruptible]).to be_falsy }
end
+
+ it "returns interruptible when overridden for job" do
+ config = YAML.dump({ default: { interruptible: true },
+ rspec: { script: "rspec" } })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.stage_builds_attributes("test").size).to eq(1)
+ expect(config_processor.stage_builds_attributes("test").first).to eq({
+ stage: "test",
+ stage_idx: 2,
+ name: "rspec",
+ options: { script: ["rspec"] },
+ interruptible: true,
+ allow_failure: false,
+ when: "on_success",
+ yaml_variables: []
+ })
+ end
end
describe 'retry entry' do
@@ -249,6 +268,108 @@ module Gitlab
end
end
+ describe '#workflow_attributes' do
+ context 'with disallowed workflow:variables' do
+ let(:config) do
+ <<-EOYML
+ workflow:
+ rules:
+ - if: $VAR == "value"
+ variables:
+ UNSUPPORTED: "unparsed"
+ EOYML
+ end
+
+ it 'parses the workflow:rules configuration' do
+ expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'workflow config contains unknown keys: variables')
+ end
+ end
+
+ context 'with rules and variables' do
+ let(:config) do
+ <<-EOYML
+ variables:
+ SUPPORTED: "parsed"
+
+ workflow:
+ rules:
+ - if: $VAR == "value"
+
+ hello:
+ script: echo world
+ EOYML
+ end
+
+ it 'parses the workflow:rules configuration' do
+ expect(subject.workflow_attributes[:rules]).to contain_exactly({ if: '$VAR == "value"' })
+ end
+
+ it 'parses the root:variables as yaml_variables:' do
+ expect(subject.workflow_attributes[:yaml_variables])
+ .to contain_exactly({ key: 'SUPPORTED', value: 'parsed', public: true })
+ end
+ end
+
+ context 'with rules and no variables' do
+ let(:config) do
+ <<-EOYML
+ workflow:
+ rules:
+ - if: $VAR == "value"
+
+ hello:
+ script: echo world
+ EOYML
+ end
+
+ it 'parses the workflow:rules configuration' do
+ expect(subject.workflow_attributes[:rules]).to contain_exactly({ if: '$VAR == "value"' })
+ end
+
+ it 'parses the root:variables as yaml_variables:' do
+ expect(subject.workflow_attributes[:yaml_variables]).to eq([])
+ end
+ end
+
+ context 'with variables and no rules' do
+ let(:config) do
+ <<-EOYML
+ variables:
+ SUPPORTED: "parsed"
+
+ hello:
+ script: echo world
+ EOYML
+ end
+
+ it 'parses the workflow:rules configuration' do
+ expect(subject.workflow_attributes[:rules]).to be_nil
+ end
+
+ it 'parses the root:variables as yaml_variables:' do
+ expect(subject.workflow_attributes[:yaml_variables])
+ .to contain_exactly({ key: 'SUPPORTED', value: 'parsed', public: true })
+ end
+ end
+
+ context 'with no rules and no variables' do
+ let(:config) do
+ <<-EOYML
+ hello:
+ script: echo world
+ EOYML
+ end
+
+ it 'parses the workflow:rules configuration' do
+ expect(subject.workflow_attributes[:rules]).to be_nil
+ end
+
+ it 'parses the root:variables as yaml_variables:' do
+ expect(subject.workflow_attributes[:yaml_variables]).to eq([])
+ end
+ end
+ end
+
describe 'only / except policies validations' do
context 'when `only` has an invalid value' do
let(:config) { { rspec: { script: "rspec", type: "test", only: only } } }
@@ -330,7 +451,7 @@ module Gitlab
}
end
- it "return commands with scripts concencaced" do
+ it "return commands with scripts concatenated" do
expect(subject[:options][:before_script]).to eq(["global script"])
end
end
@@ -343,7 +464,7 @@ module Gitlab
}
end
- it "return commands with scripts concencaced" do
+ it "return commands with scripts concatenated" do
expect(subject[:options][:before_script]).to eq(["global script"])
end
end
@@ -356,21 +477,48 @@ module Gitlab
}
end
- it "return commands with scripts concencaced" do
+ it "return commands with scripts concatenated" do
expect(subject[:options][:before_script]).to eq(["local script"])
end
end
+
+ context 'when script is array of arrays of strings' do
+ let(:config) do
+ {
+ before_script: [["global script", "echo 1"], ["ls"], "pwd"],
+ test: { script: ["script"] }
+ }
+ end
+
+ it "return commands with scripts concatenated" do
+ expect(subject[:options][:before_script]).to eq(["global script", "echo 1", "ls", "pwd"])
+ end
+ end
end
describe "script" do
- let(:config) do
- {
- test: { script: ["script"] }
- }
+ context 'when script is array of strings' do
+ let(:config) do
+ {
+ test: { script: ["script"] }
+ }
+ end
+
+ it "return commands with scripts concatenated" do
+ expect(subject[:options][:script]).to eq(["script"])
+ end
end
- it "return commands with scripts concencaced" do
- expect(subject[:options][:script]).to eq(["script"])
+ context 'when script is array of arrays of strings' do
+ let(:config) do
+ {
+ test: { script: [["script"], ["echo 1"], "ls"] }
+ }
+ end
+
+ it "return commands with scripts concatenated" do
+ expect(subject[:options][:script]).to eq(["script", "echo 1", "ls"])
+ end
end
end
@@ -413,6 +561,19 @@ module Gitlab
expect(subject[:options][:after_script]).to eq(["local after_script"])
end
end
+
+ context 'when script is array of arrays of strings' do
+ let(:config) do
+ {
+ after_script: [["global script", "echo 1"], ["ls"], "pwd"],
+ test: { script: ["script"] }
+ }
+ end
+
+ it "return after_script in options" do
+ expect(subject[:options][:after_script]).to eq(["global script", "echo 1", "ls", "pwd"])
+ end
+ end
end
end
@@ -891,7 +1052,7 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq(
+ expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
paths: ["logs/", "binaries/"],
untracked: true,
key: 'key',
@@ -903,7 +1064,7 @@ module Gitlab
config = YAML.dump(
{
default: {
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' }
+ cache: { paths: ["logs/", "binaries/"], untracked: true, key: { files: ['file'] } }
},
rspec: {
script: "rspec"
@@ -913,33 +1074,79 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq(
+ expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
paths: ["logs/", "binaries/"],
untracked: true,
- key: 'key',
+ key: { files: ['file'] },
policy: 'pull-push'
)
end
- it "returns cache when defined in a job" do
+ it 'returns cache key when defined in a job' do
config = YAML.dump({
rspec: {
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
- script: "rspec"
+ cache: { paths: ['logs/', 'binaries/'], untracked: true, key: 'key' },
+ script: 'rspec'
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq(
- paths: ["logs/", "binaries/"],
+ expect(config_processor.stage_builds_attributes('test').size).to eq(1)
+ expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
+ paths: ['logs/', 'binaries/'],
untracked: true,
key: 'key',
policy: 'pull-push'
)
end
+ it 'returns cache files' do
+ config = YAML.dump(
+ rspec: {
+ cache: {
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: { files: ['file'] }
+ },
+ script: 'rspec'
+ }
+ )
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.stage_builds_attributes('test').size).to eq(1)
+ expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: { files: ['file'] },
+ policy: 'pull-push'
+ )
+ end
+
+ it 'returns cache files with prefix' do
+ config = YAML.dump(
+ rspec: {
+ cache: {
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: { files: ['file'], prefix: 'prefix' }
+ },
+ script: 'rspec'
+ }
+ )
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.stage_builds_attributes('test').size).to eq(1)
+ expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: { files: ['file'], prefix: 'prefix' },
+ policy: 'pull-push'
+ )
+ end
+
it "overwrite cache when defined for a job and globally" do
config = YAML.dump({
cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
@@ -952,7 +1159,7 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq(
+ expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
paths: ["test/"],
untracked: false,
key: 'local',
@@ -970,6 +1177,7 @@ module Gitlab
rspec: {
artifacts: {
paths: ["logs/", "binaries/"],
+ expose_as: "Exposed artifacts",
untracked: true,
name: "custom_name",
expire_in: "7d"
@@ -993,6 +1201,7 @@ module Gitlab
artifacts: {
name: "custom_name",
paths: ["logs/", "binaries/"],
+ expose_as: "Exposed artifacts",
untracked: true,
expire_in: "7d"
}
@@ -1251,7 +1460,7 @@ module Gitlab
end
end
- describe "Needs" do
+ describe "Job Needs" do
let(:needs) { }
let(:dependencies) { }
@@ -1259,6 +1468,7 @@ module Gitlab
{
build1: { stage: 'build', script: 'test' },
build2: { stage: 'build', script: 'test' },
+ parallel: { stage: 'build', script: 'test', parallel: 2 },
test1: { stage: 'test', script: 'test', needs: needs, dependencies: dependencies },
test2: { stage: 'test', script: 'test' },
deploy: { stage: 'test', script: 'test' }
@@ -1275,7 +1485,7 @@ module Gitlab
let(:needs) { %w(build1 build2) }
it "does create jobs with valid specification" do
- expect(subject.builds.size).to eq(5)
+ expect(subject.builds.size).to eq(7)
expect(subject.builds[0]).to eq(
stage: "build",
stage_idx: 1,
@@ -1287,16 +1497,11 @@ module Gitlab
allow_failure: false,
yaml_variables: []
)
- expect(subject.builds[2]).to eq(
+ expect(subject.builds[4]).to eq(
stage: "test",
stage_idx: 2,
name: "test1",
- options: {
- script: ["test"],
- # This does not make sense, there is a follow-up:
- # https://gitlab.com/gitlab-org/gitlab-foss/issues/65569
- bridge_needs: %w[build1 build2]
- },
+ options: { script: ["test"] },
needs_attributes: [
{ name: "build1" },
{ name: "build2" }
@@ -1308,10 +1513,25 @@ module Gitlab
end
end
- context 'needs two builds defined as symbols' do
- let(:needs) { [:build1, :build2] }
+ context 'needs parallel job' do
+ let(:needs) { %w(parallel) }
- it { expect { subject }.not_to raise_error }
+ it "does create jobs with valid specification" do
+ expect(subject.builds.size).to eq(7)
+ expect(subject.builds[4]).to eq(
+ stage: "test",
+ stage_idx: 2,
+ name: "test1",
+ options: { script: ["test"] },
+ needs_attributes: [
+ { name: "parallel 1/2" },
+ { name: "parallel 2/2" }
+ ],
+ when: "on_success",
+ allow_failure: false,
+ yaml_variables: []
+ )
+ end
end
context 'undefined need' do
@@ -1545,28 +1765,42 @@ module Gitlab
config = YAML.dump({ before_script: "bundle update", rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "before_script config should be an array of strings")
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "before_script config should be an array containing strings and arrays of strings")
end
it "returns errors if job before_script parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", before_script: [10, "test"] } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:before_script config should be an array of strings")
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:before_script config should be an array containing strings and arrays of strings")
+ end
+
+ it "returns errors if job before_script parameter is a multi-level nested array of strings" do
+ config = YAML.dump({ rspec: { script: "test", before_script: [["ls", ["pwd"]], "test"] } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:before_script config should be an array containing strings and arrays of strings")
end
it "returns errors if after_script parameter is invalid" do
config = YAML.dump({ after_script: "bundle update", rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "after_script config should be an array of strings")
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "after_script config should be an array containing strings and arrays of strings")
end
it "returns errors if job after_script parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", after_script: [10, "test"] } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:after_script config should be an array of strings")
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:after_script config should be an array containing strings and arrays of strings")
+ end
+
+ it "returns errors if job after_script parameter is a multi-level nested array of strings" do
+ config = YAML.dump({ rspec: { script: "test", after_script: [["ls", ["pwd"]], "test"] } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:after_script config should be an array containing strings and arrays of strings")
end
it "returns errors if image parameter is invalid" do
@@ -1776,14 +2010,42 @@ module Gitlab
config = YAML.dump({ cache: { key: 1 }, rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:key config should be a string or symbol")
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:key should be a hash, a string or a symbol")
end
it "returns errors if job cache:key is not an a string" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: 1 } } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:key config should be a string or symbol")
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:key should be a hash, a string or a symbol")
+ end
+
+ it 'returns errors if job cache:key:files is not an array of strings' do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { files: [1] } } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:rspec:cache:key:files config should be an array of strings')
+ end
+
+ it 'returns errors if job cache:key:files is an empty array' do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { files: [] } } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:rspec:cache:key:files config requires at least 1 item')
+ end
+
+ it 'returns errors if job defines only cache:key:prefix' do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { prefix: 'prefix-key' } } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:rspec:cache:key config missing required keys: files')
+ end
+
+ it 'returns errors if job cache:key:prefix is not a string' do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { prefix: 1, files: ['file'] } } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:rspec:cache:key:prefix config should be a string or symbol')
end
it "returns errors if job cache:untracked is not an array of strings" do
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb
index 974cc2c4660..fc9792e16d7 100644
--- a/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb
@@ -21,11 +21,10 @@ describe Gitlab::Cleanup::OrphanJobArtifactFiles do
end
it 'errors when invalid niceness is given' do
+ allow(Gitlab::Utils).to receive(:which).with('ionice').and_return('/fake/ionice')
cleanup = described_class.new(logger: null_logger, niceness: 'FooBar')
- expect(null_logger).to receive(:error).with(/FooBar/)
-
- cleanup.run!
+ expect { cleanup.run! }.to raise_error('Invalid niceness')
end
it 'finds artifacts on disk' do
@@ -63,6 +62,8 @@ describe Gitlab::Cleanup::OrphanJobArtifactFiles do
def mock_artifacts_found(cleanup, *files)
mock = allow(cleanup).to receive(:find_artifacts)
- files.each { |file| mock.and_yield(file) }
+ # Because we shell out to run `find -L ...`, each file actually
+ # contains a trailing newline
+ files.each { |file| mock.and_yield("#{file}\n") }
end
end
diff --git a/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb b/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
index 1eddf488c5d..b8ac8c5b95c 100644
--- a/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
+++ b/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
@@ -8,15 +8,28 @@ describe Gitlab::Cluster::Mixins::PumaCluster do
PUMA_STARTUP_TIMEOUT = 30
context 'when running Puma in Cluster-mode' do
- %i[USR1 USR2 INT HUP].each do |signal|
- it "for #{signal} does execute phased restart block" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:signal, :exitstatus, :termsig) do
+ # executes phased restart block
+ :USR1 | 140 | nil
+ :USR2 | 140 | nil
+ :INT | 140 | nil
+ :HUP | 140 | nil
+
+ # does not execute phased restart block
+ :TERM | nil | 15
+ end
+
+ with_them do
+ it 'properly handles process lifecycle' do
with_puma(workers: 1) do |pid|
Process.kill(signal, pid)
child_pid, child_status = Process.wait2(pid)
expect(child_pid).to eq(pid)
- expect(child_status).to be_exited
- expect(child_status.exitstatus).to eq(140)
+ expect(child_status.exitstatus).to eq(exitstatus)
+ expect(child_status.termsig).to eq(termsig)
end
end
end
@@ -62,8 +75,12 @@ describe Gitlab::Cluster::Mixins::PumaCluster do
Puma::Cluster.prepend(#{described_class})
- Gitlab::Cluster::LifecycleEvents.on_before_phased_restart do
- exit(140)
+ mutex = Mutex.new
+
+ Gitlab::Cluster::LifecycleEvents.on_before_blackout_period do
+ mutex.synchronize do
+ exit(140)
+ end
end
# redirect stderr to stdout
diff --git a/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb b/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
index 2b3a267991c..ebe019924d5 100644
--- a/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
+++ b/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
@@ -5,31 +5,30 @@ require 'spec_helper'
# For easier debugging set `UNICORN_DEBUG=1`
describe Gitlab::Cluster::Mixins::UnicornHttpServer do
- UNICORN_STARTUP_TIMEOUT = 10
+ UNICORN_STARTUP_TIMEOUT = 30
context 'when running Unicorn' do
- %i[USR2].each do |signal|
- it "for #{signal} does execute phased restart block" do
- with_unicorn(workers: 1) do |pid|
- Process.kill(signal, pid)
+ using RSpec::Parameterized::TableSyntax
- child_pid, child_status = Process.wait2(pid)
- expect(child_pid).to eq(pid)
- expect(child_status).to be_exited
- expect(child_status.exitstatus).to eq(140)
- end
- end
+ where(:signal, :exitstatus, :termsig) do
+ # executes phased restart block
+ :USR2 | 140 | nil
+ :QUIT | 140 | nil
+
+ # does not execute phased restart block
+ :INT | 0 | nil
+ :TERM | 0 | nil
end
- %i[QUIT TERM INT].each do |signal|
- it "for #{signal} does not execute phased restart block" do
+ with_them do
+ it 'properly handles process lifecycle' do
with_unicorn(workers: 1) do |pid|
Process.kill(signal, pid)
child_pid, child_status = Process.wait2(pid)
expect(child_pid).to eq(pid)
- expect(child_status).to be_exited
- expect(child_status.exitstatus).to eq(0)
+ expect(child_status.exitstatus).to eq(exitstatus)
+ expect(child_status.termsig).to eq(termsig)
end
end
end
@@ -74,8 +73,12 @@ describe Gitlab::Cluster::Mixins::UnicornHttpServer do
Unicorn::HttpServer.prepend(#{described_class})
- Gitlab::Cluster::LifecycleEvents.on_before_phased_restart do
- exit(140)
+ mutex = Mutex.new
+
+ Gitlab::Cluster::LifecycleEvents.on_before_blackout_period do
+ mutex.synchronize do
+ exit(140)
+ end
end
# redirect stderr to stdout
diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb
index a163de07967..9eee7e89062 100644
--- a/spec/lib/gitlab/cycle_analytics/events_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/events_spec.rb
@@ -129,7 +129,7 @@ describe 'cycle analytics events' do
end
end
- describe '#test_events' do
+ describe '#test_events', :sidekiq_might_not_need_inline do
let(:stage) { :test }
let(:merge_request) { MergeRequest.first }
@@ -234,7 +234,7 @@ describe 'cycle analytics events' do
end
end
- describe '#staging_events' do
+ describe '#staging_events', :sidekiq_might_not_need_inline do
let(:stage) { :staging }
let(:merge_request) { MergeRequest.first }
@@ -306,7 +306,7 @@ describe 'cycle analytics events' do
end
end
- describe '#production_events' do
+ describe '#production_events', :sidekiq_might_not_need_inline do
let(:stage) { :production }
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
diff --git a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
index d5c2f7cc579..664009f140f 100644
--- a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
@@ -44,6 +44,14 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
expect(subject.first[:value]).to eq(2)
end
end
+
+ context 'when `from` and `to` parameters are provided' do
+ subject { described_class.new(group, options: { from: 10.days.ago, to: Time.now, current_user: user }).data }
+
+ it 'finds issues from 5 days ago' do
+ expect(subject.first[:value]).to eq(2)
+ end
+ end
end
context 'with other projects' do
@@ -97,6 +105,14 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
expect(subject.second[:value]).to eq(2)
end
end
+
+ context 'when `from` and `to` parameters are provided' do
+ subject { described_class.new(group, options: { from: 10.days.ago, to: Time.now, current_user: user }).data }
+
+ it 'finds deployments from 5 days ago' do
+ expect(subject.second[:value]).to eq(2)
+ end
+ end
end
context 'with other projects' do
diff --git a/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb b/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb
index e568ea633db..d4ab9bc225b 100644
--- a/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb
@@ -71,7 +71,7 @@ describe Gitlab::CycleAnalytics::UsageData do
}
end
- it 'returns the aggregated usage data of every selected project' do
+ it 'returns the aggregated usage data of every selected project', :sidekiq_might_not_need_inline do
result = subject.to_json
expect(result).to have_key(:avg_cycle_analytics)
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index 1696d3566ad..8056418e697 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -178,6 +178,7 @@ describe Gitlab::Danger::Helper do
'app/assets/foo' | :frontend
'app/views/foo' | :frontend
'public/foo' | :frontend
+ 'scripts/frontend/foo' | :frontend
'spec/javascripts/foo' | :frontend
'spec/frontend/bar' | :frontend
'vendor/assets/foo' | :frontend
@@ -193,10 +194,8 @@ describe Gitlab::Danger::Helper do
'app/models/foo' | :backend
'bin/foo' | :backend
'config/foo' | :backend
- 'danger/foo' | :backend
'lib/foo' | :backend
'rubocop/foo' | :backend
- 'scripts/foo' | :backend
'spec/foo' | :backend
'spec/foo/bar' | :backend
@@ -209,16 +208,24 @@ describe Gitlab::Danger::Helper do
'vendor/languages.yml' | :backend
'vendor/licenses.csv' | :backend
- 'Dangerfile' | :backend
'Gemfile' | :backend
'Gemfile.lock' | :backend
'Procfile' | :backend
'Rakefile' | :backend
'FOO_VERSION' | :backend
+ 'Dangerfile' | :engineering_productivity
+ 'danger/commit_messages/Dangerfile' | :engineering_productivity
+ 'ee/danger/commit_messages/Dangerfile' | :engineering_productivity
+ 'danger/commit_messages/' | :engineering_productivity
+ 'ee/danger/commit_messages/' | :engineering_productivity
'.gitlab-ci.yml' | :engineering_productivity
'.gitlab/ci/cng.gitlab-ci.yml' | :engineering_productivity
'.gitlab/ci/ee-specific-checks.gitlab-ci.yml' | :engineering_productivity
+ 'scripts/foo' | :engineering_productivity
+ 'lib/gitlab/danger/foo' | :engineering_productivity
+ 'ee/lib/gitlab/danger/foo' | :engineering_productivity
+
'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | :backend
'ee/FOO_VERSION' | :unknown
diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/lib/gitlab/danger/teammate_spec.rb
index bd1c2b10dc8..35edfa08a63 100644
--- a/spec/lib/gitlab/danger/teammate_spec.rb
+++ b/spec/lib/gitlab/danger/teammate_spec.rb
@@ -30,7 +30,7 @@ describe Gitlab::Danger::Teammate do
expect(subject.maintainer?(project, :frontend, labels)).to be_truthy
end
- context 'when labels contain Create and the category is test' do
+ context 'when labels contain devops::create and the category is test' do
let(:labels) { ['devops::create'] }
context 'when role is Test Automation Engineer, Create' do
@@ -79,6 +79,22 @@ describe Gitlab::Danger::Teammate do
it '#maintainer? returns false' do
expect(subject.maintainer?(project, :engineering_productivity, labels)).to be_falsey
end
+
+ context 'when capabilities include maintainer backend' do
+ let(:capabilities) { ['maintainer backend'] }
+
+ it '#maintainer? returns true' do
+ expect(subject.maintainer?(project, :engineering_productivity, labels)).to be_truthy
+ end
+ end
+
+ context 'when capabilities include trainee_maintainer backend' do
+ let(:capabilities) { ['trainee_maintainer backend'] }
+
+ it '#traintainer? returns true' do
+ expect(subject.traintainer?(project, :engineering_productivity, labels)).to be_truthy
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index 0a6e2302b09..42d7329494d 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -35,5 +35,12 @@ describe Gitlab::DataBuilder::Deployment do
expect(data[:commit_url]).to eq(expected_commit_url)
expect(data[:commit_title]).to eq(commit.title)
end
+
+ it 'does not include the deployable URL when there is no deployable' do
+ deployment = create(:deployment, status: :failed, deployable: nil)
+ data = described_class.build(deployment)
+
+ expect(data[:deployable_url]).to be_nil
+ end
end
end
diff --git a/spec/lib/gitlab/data_builder/push_spec.rb b/spec/lib/gitlab/data_builder/push_spec.rb
index 58509b69463..cbc03fc38eb 100644
--- a/spec/lib/gitlab/data_builder/push_spec.rb
+++ b/spec/lib/gitlab/data_builder/push_spec.rb
@@ -57,6 +57,32 @@ describe Gitlab::DataBuilder::Push do
include_examples 'deprecated repository hook data'
end
+ describe '.sample_data' do
+ let(:data) { described_class.sample_data }
+
+ it { expect(data).to be_a(Hash) }
+ it { expect(data[:before]).to eq('95790bf891e76fee5e1747ab589903a6a1f80f22') }
+ it { expect(data[:after]).to eq('da1560886d4f094c3e6c9ef40349f7d38b5d27d7') }
+ it { expect(data[:ref]).to eq('refs/heads/master') }
+ it { expect(data[:project_id]).to eq(15) }
+ it { expect(data[:commits].size).to eq(1) }
+ it { expect(data[:total_commits_count]).to eq(1) }
+ it 'contains project data' do
+ expect(data[:project]).to be_a(Hash)
+ expect(data[:project][:id]).to eq(15)
+ expect(data[:project][:name]).to eq('gitlab')
+ expect(data[:project][:description]).to eq('')
+ expect(data[:project][:web_url]).to eq('http://test.example.com/gitlab/gitlab')
+ expect(data[:project][:avatar_url]).to eq('https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80')
+ expect(data[:project][:git_http_url]).to eq('http://test.example.com/gitlab/gitlab.git')
+ expect(data[:project][:git_ssh_url]).to eq('git@test.example.com:gitlab/gitlab.git')
+ expect(data[:project][:namespace]).to eq('gitlab')
+ expect(data[:project][:visibility_level]).to eq(0)
+ expect(data[:project][:path_with_namespace]).to eq('gitlab/gitlab')
+ expect(data[:project][:default_branch]).to eq('master')
+ end
+ end
+
describe '.build' do
let(:data) do
described_class.build(
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 49f92f14559..449eee7a371 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -142,7 +142,6 @@ describe Gitlab::Database::MigrationHelpers do
allow(model).to receive(:transaction_open?).and_return(false)
allow(model).to receive(:index_exists?).and_return(true)
allow(model).to receive(:disable_statement_timeout).and_call_original
- allow(model).to receive(:supports_drop_index_concurrently?).and_return(true)
end
describe 'by column name' do
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
index aab6fbcbbd1..5b1a17e734d 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
@@ -164,15 +164,6 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
end
it_behaves_like 'has prometheus service', 'http://localhost:9090'
-
- it 'does not overwrite the existing whitelist' do
- application_setting.outbound_local_requests_whitelist = ['example.com']
-
- expect(result[:status]).to eq(:success)
- expect(application_setting.outbound_local_requests_whitelist).to contain_exactly(
- 'example.com', 'localhost'
- )
- end
end
context 'with non default prometheus address' do
diff --git a/spec/lib/gitlab/devise_failure_spec.rb b/spec/lib/gitlab/devise_failure_spec.rb
new file mode 100644
index 00000000000..eee05c7befd
--- /dev/null
+++ b/spec/lib/gitlab/devise_failure_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::DeviseFailure do
+ let(:env) do
+ {
+ 'REQUEST_URI' => 'http://test.host/',
+ 'HTTP_HOST' => 'test.host',
+ 'REQUEST_METHOD' => 'GET',
+ 'warden.options' => { scope: :user },
+ 'rack.session' => {},
+ 'rack.session.options' => {},
+ 'rack.input' => "",
+ 'warden' => OpenStruct.new(message: nil)
+ }
+ end
+
+ let(:response) { described_class.call(env).to_a }
+ let(:request) { ActionDispatch::Request.new(env) }
+
+ context 'When redirecting' do
+ it 'sets the expire_after key' do
+ response
+
+ expect(env['rack.session.options']).to have_key(:expire_after)
+ end
+
+ it 'returns to the default redirect location' do
+ expect(response.first).to eq(302)
+ expect(request.flash[:alert]).to eq('You need to sign in or sign up before continuing.')
+ expect(response.second['Location']).to eq('http://test.host/users/sign_in')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
index 35aa663b0a5..a65214fab61 100644
--- a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
+++ b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Email::Hook::SmimeSignatureInterceptor do
diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
index c3b706fc538..747fe369c78 100644
--- a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do
diff --git a/spec/lib/gitlab/exclusive_lease_spec.rb b/spec/lib/gitlab/exclusive_lease_spec.rb
index aed7d8d81ce..0739f622af5 100644
--- a/spec/lib/gitlab/exclusive_lease_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::ExclusiveLease, :clean_gitlab_redis_shared_state do
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 2e5fd16d370..9be6ace3be5 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -2,81 +2,194 @@
require 'spec_helper'
-describe Gitlab::Experimentation::ControllerConcern, type: :controller do
- controller(ApplicationController) do
- include Gitlab::Experimentation::ControllerConcern
+describe Gitlab::Experimentation do
+ before do
+ stub_const('Gitlab::Experimentation::EXPERIMENTS', {
+ test_experiment: {
+ feature_toggle: feature_toggle,
+ environment: environment,
+ enabled_ratio: enabled_ratio,
+ tracking_category: 'Team'
+ }
+ })
- def index
- head :ok
- end
+ stub_feature_flags(feature_toggle => true)
end
- describe '#set_experimentation_subject_id_cookie' do
- before do
- get :index
+ let(:feature_toggle) { :test_experiment_toggle }
+ let(:environment) { Rails.env.test? }
+ let(:enabled_ratio) { 0.1 }
+
+ describe Gitlab::Experimentation::ControllerConcern, type: :controller do
+ controller(ApplicationController) do
+ include Gitlab::Experimentation::ControllerConcern
+
+ def index
+ head :ok
+ end
end
- context 'cookie is present' do
+ describe '#set_experimentation_subject_id_cookie' do
before do
- cookies[:experimentation_subject_id] = 'test'
+ get :index
end
- it 'does not change the cookie' do
- expect(cookies[:experimentation_subject_id]).to eq 'test'
+ context 'cookie is present' do
+ before do
+ cookies[:experimentation_subject_id] = 'test'
+ end
+
+ it 'does not change the cookie' do
+ expect(cookies[:experimentation_subject_id]).to eq 'test'
+ end
end
- end
- context 'cookie is not present' do
- it 'sets a permanent signed cookie' do
- expect(cookies.permanent.signed[:experimentation_subject_id]).to be_present
+ context 'cookie is not present' do
+ it 'sets a permanent signed cookie' do
+ expect(cookies.permanent.signed[:experimentation_subject_id]).to be_present
+ end
end
end
- end
- describe '#experiment_enabled?' do
- context 'cookie is not present' do
- it 'calls Gitlab::Experimentation.enabled? with the name of the experiment and an experimentation_subject_index of nil' do
- expect(Gitlab::Experimentation).to receive(:enabled?).with(:test_experiment, nil)
- controller.experiment_enabled?(:test_experiment)
+ describe '#experiment_enabled?' do
+ context 'cookie is not present' do
+ it 'calls Gitlab::Experimentation.enabled_for_user? with the name of the experiment and an experimentation_subject_index of nil' do
+ expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, nil) # rubocop:disable RSpec/DescribedClass
+ controller.experiment_enabled?(:test_experiment)
+ end
+ end
+
+ context 'cookie is present' do
+ before do
+ cookies.permanent.signed[:experimentation_subject_id] = 'abcd-1234'
+ get :index
+ end
+
+ it 'calls Gitlab::Experimentation.enabled_for_user? with the name of the experiment and an experimentation_subject_index of the modulo 100 of the hex value of the uuid' do
+ # 'abcd1234'.hex % 100 = 76
+ expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, 76) # rubocop:disable RSpec/DescribedClass
+ controller.experiment_enabled?(:test_experiment)
+ end
+ end
+
+ describe 'URL parameter to force enable experiment' do
+ it 'returns true' do
+ get :index, params: { force_experiment: :test_experiment }
+
+ expect(controller.experiment_enabled?(:test_experiment)).to be_truthy
+ end
end
end
- context 'cookie is present' do
- before do
- cookies.permanent.signed[:experimentation_subject_id] = 'abcd-1234'
- get :index
+ describe '#track_experiment_event' do
+ context 'when the experiment is enabled' do
+ before do
+ stub_experiment(test_experiment: true)
+ end
+
+ context 'the user is part of the experimental group' do
+ before do
+ stub_experiment_for_user(test_experiment: true)
+ end
+
+ it 'tracks the event with the right parameters' do
+ expect(Gitlab::Tracking).to receive(:event).with(
+ 'Team',
+ 'start',
+ label: nil,
+ property: 'experimental_group'
+ )
+ controller.track_experiment_event(:test_experiment, 'start')
+ end
+ end
+
+ context 'the user is part of the control group' do
+ before do
+ stub_experiment_for_user(test_experiment: false)
+ end
+
+ it 'tracks the event with the right parameters' do
+ expect(Gitlab::Tracking).to receive(:event).with(
+ 'Team',
+ 'start',
+ label: nil,
+ property: 'control_group'
+ )
+ controller.track_experiment_event(:test_experiment, 'start')
+ end
+ end
end
- it 'calls Gitlab::Experimentation.enabled? with the name of the experiment and an experimentation_subject_index of the modulo 100 of the hex value of the uuid' do
- # 'abcd1234'.hex % 100 = 76
- expect(Gitlab::Experimentation).to receive(:enabled?).with(:test_experiment, 76)
- controller.experiment_enabled?(:test_experiment)
+ context 'when the experiment is disabled' do
+ before do
+ stub_experiment(test_experiment: false)
+ end
+
+ it 'does not track the event' do
+ expect(Gitlab::Tracking).not_to receive(:event)
+ controller.track_experiment_event(:test_experiment, 'start')
+ end
end
end
- end
-end
-describe Gitlab::Experimentation do
- before do
- stub_const('Gitlab::Experimentation::EXPERIMENTS', {
- test_experiment: {
- feature_toggle: feature_toggle,
- environment: environment,
- enabled_ratio: enabled_ratio
- }
- })
+ describe '#frontend_experimentation_tracking_data' do
+ context 'when the experiment is enabled' do
+ before do
+ stub_experiment(test_experiment: true)
+ end
- stub_feature_flags(feature_toggle => true)
- end
+ context 'the user is part of the experimental group' do
+ before do
+ stub_experiment_for_user(test_experiment: true)
+ end
+
+ it 'pushes the right parameters to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ label: nil,
+ property: 'experimental_group'
+ }
+ )
+ end
+ end
- let(:feature_toggle) { :test_experiment_toggle }
- let(:environment) { Rails.env.test? }
- let(:enabled_ratio) { 0.1 }
+ context 'the user is part of the control group' do
+ before do
+ allow_any_instance_of(described_class).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+ end
+
+ it 'pushes the right parameters to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ label: nil,
+ property: 'control_group'
+ }
+ )
+ end
+ end
+ end
- describe '.enabled?' do
- subject { described_class.enabled?(:test_experiment, experimentation_subject_index) }
+ context 'when the experiment is disabled' do
+ before do
+ stub_experiment(test_experiment: false)
+ end
- let(:experimentation_subject_index) { 9 }
+ it 'does not push data to gon' do
+ expect(Gon.method_defined?(:tracking_data)).to be_falsey
+ controller.track_experiment_event(:test_experiment, 'start')
+ end
+ end
+ end
+ end
+
+ describe '.enabled?' do
+ subject { described_class.enabled?(:test_experiment) }
context 'feature toggle is enabled, we are on the right environment and we are selected' do
it { is_expected.to be_truthy }
@@ -84,7 +197,7 @@ describe Gitlab::Experimentation do
describe 'experiment is not defined' do
it 'returns false' do
- expect(described_class.enabled?(:missing_experiment, experimentation_subject_index)).to be_falsey
+ expect(described_class.enabled?(:missing_experiment)).to be_falsey
end
end
@@ -127,30 +240,52 @@ describe Gitlab::Experimentation do
it { is_expected.to be_falsey }
end
end
+ end
- describe 'enabled ratio' do
- context 'enabled ratio is not set' do
- let(:enabled_ratio) { nil }
+ describe '.enabled_for_user?' do
+ subject { described_class.enabled_for_user?(:test_experiment, experimentation_subject_index) }
- it { is_expected.to be_falsey }
+ let(:experimentation_subject_index) { 9 }
+
+ context 'experiment is disabled' do
+ before do
+ allow(described_class).to receive(:enabled?).and_return(false)
end
- context 'experimentation_subject_index is not set' do
- let(:experimentation_subject_index) { nil }
+ it { is_expected.to be_falsey }
+ end
- it { is_expected.to be_falsey }
+ context 'experiment is enabled' do
+ before do
+ allow(described_class).to receive(:enabled?).and_return(true)
end
- context 'experimentation_subject_index is an empty string' do
- let(:experimentation_subject_index) { '' }
+ it { is_expected.to be_truthy }
+
+ context 'enabled ratio is not set' do
+ let(:enabled_ratio) { nil }
it { is_expected.to be_falsey }
end
- context 'experimentation_subject_index outside enabled ratio' do
- let(:experimentation_subject_index) { 11 }
+ describe 'experimentation_subject_index' do
+ context 'experimentation_subject_index is not set' do
+ let(:experimentation_subject_index) { nil }
- it { is_expected.to be_falsey }
+ it { is_expected.to be_falsey }
+ end
+
+ context 'experimentation_subject_index is an empty string' do
+ let(:experimentation_subject_index) { '' }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'experimentation_subject_index outside enabled ratio' do
+ let(:experimentation_subject_index) { 11 }
+
+ it { is_expected.to be_falsey }
+ end
end
end
end
diff --git a/spec/lib/gitlab/external_authorization/access_spec.rb b/spec/lib/gitlab/external_authorization/access_spec.rb
index 5dc2521b310..8a08b2a6275 100644
--- a/spec/lib/gitlab/external_authorization/access_spec.rb
+++ b/spec/lib/gitlab/external_authorization/access_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::ExternalAuthorization::Access, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/external_authorization/cache_spec.rb b/spec/lib/gitlab/external_authorization/cache_spec.rb
index 58e7d626707..1f217249f97 100644
--- a/spec/lib/gitlab/external_authorization/cache_spec.rb
+++ b/spec/lib/gitlab/external_authorization/cache_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::ExternalAuthorization::Cache, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/external_authorization/client_spec.rb b/spec/lib/gitlab/external_authorization/client_spec.rb
index a87f50b4586..a17d933e3bb 100644
--- a/spec/lib/gitlab/external_authorization/client_spec.rb
+++ b/spec/lib/gitlab/external_authorization/client_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::ExternalAuthorization::Client do
diff --git a/spec/lib/gitlab/external_authorization/logger_spec.rb b/spec/lib/gitlab/external_authorization/logger_spec.rb
index 81f1b2390e6..380e765309c 100644
--- a/spec/lib/gitlab/external_authorization/logger_spec.rb
+++ b/spec/lib/gitlab/external_authorization/logger_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::ExternalAuthorization::Logger do
diff --git a/spec/lib/gitlab/external_authorization/response_spec.rb b/spec/lib/gitlab/external_authorization/response_spec.rb
index 43211043eca..e1f6e9ac1fa 100644
--- a/spec/lib/gitlab/external_authorization/response_spec.rb
+++ b/spec/lib/gitlab/external_authorization/response_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::ExternalAuthorization::Response do
diff --git a/spec/lib/gitlab/external_authorization_spec.rb b/spec/lib/gitlab/external_authorization_spec.rb
index c45fcca3f06..97055e7b3f9 100644
--- a/spec/lib/gitlab/external_authorization_spec.rb
+++ b/spec/lib/gitlab/external_authorization_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::ExternalAuthorization, :request_store do
diff --git a/spec/lib/gitlab/fake_application_settings_spec.rb b/spec/lib/gitlab/fake_application_settings_spec.rb
index c81cb83d9f4..6a872185713 100644
--- a/spec/lib/gitlab/fake_application_settings_spec.rb
+++ b/spec/lib/gitlab/fake_application_settings_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::FakeApplicationSettings do
diff --git a/spec/lib/gitlab/favicon_spec.rb b/spec/lib/gitlab/favicon_spec.rb
index 617c0f88a89..884425dab3b 100644
--- a/spec/lib/gitlab/favicon_spec.rb
+++ b/spec/lib/gitlab/favicon_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe Gitlab::Favicon, :request_store do
diff --git a/spec/lib/gitlab/file_detector_spec.rb b/spec/lib/gitlab/file_detector_spec.rb
index 4ba9094b24e..f3a9f706e86 100644
--- a/spec/lib/gitlab/file_detector_spec.rb
+++ b/spec/lib/gitlab/file_detector_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::FileDetector do
diff --git a/spec/lib/gitlab/file_finder_spec.rb b/spec/lib/gitlab/file_finder_spec.rb
index b49c5817131..7ea9d43c9f7 100644
--- a/spec/lib/gitlab/file_finder_spec.rb
+++ b/spec/lib/gitlab/file_finder_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::FileFinder do
@@ -6,11 +8,11 @@ describe Gitlab::FileFinder do
subject { described_class.new(project, project.default_branch) }
it_behaves_like 'file finder' do
- let(:expected_file_by_name) { 'files/images/wm.svg' }
+ let(:expected_file_by_path) { 'files/images/wm.svg' }
let(:expected_file_by_content) { 'CHANGELOG' }
end
- it 'filters by name' do
+ it 'filters by filename' do
results = subject.find('files filename:wm.svg')
expect(results.count).to eq(1)
diff --git a/spec/lib/gitlab/fogbugz_import/client_spec.rb b/spec/lib/gitlab/fogbugz_import/client_spec.rb
index dcd1a2d9813..676511211c8 100644
--- a/spec/lib/gitlab/fogbugz_import/client_spec.rb
+++ b/spec/lib/gitlab/fogbugz_import/client_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::FogbugzImport::Client do
diff --git a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
index 790b0428d19..026fd1fedde 100644
--- a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Gfm::ReferenceRewriter do
diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
index eef3b9de476..5a930d44dcb 100644
--- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Gfm::UploadsRewriter do
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 23651e3d7f2..cdab7127748 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -428,7 +428,9 @@ describe Gitlab::Git::Commit, :seed_helper do
end
end
- shared_examples 'extracting commit signature' do
+ describe '.extract_signature_lazily' do
+ subject { described_class.extract_signature_lazily(repository, commit_id).itself }
+
context 'when the commit is signed' do
let(:commit_id) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' }
@@ -492,10 +494,8 @@ describe Gitlab::Git::Commit, :seed_helper do
expect { subject }.to raise_error(ArgumentError)
end
end
- end
- describe '.extract_signature_lazily' do
- describe 'loading signatures in batch once' do
+ context 'when loading signatures in batch once' do
it 'fetches signatures in batch once' do
commit_ids = %w[0b4bc9a49b562e85de7cc9e834518ea6828729b9 4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6]
signatures = commit_ids.map do |commit_id|
@@ -516,16 +516,6 @@ describe Gitlab::Git::Commit, :seed_helper do
2.times { signatures.each(&:itself) }
end
end
-
- subject { described_class.extract_signature_lazily(repository, commit_id).itself }
-
- it_behaves_like 'extracting commit signature'
- end
-
- describe '.extract_signature' do
- subject { described_class.extract_signature(repository, commit_id) }
-
- it_behaves_like 'extracting commit signature'
end
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 81dc96b538a..f74cc5623c9 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitAccess do
diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb
index 6ba65b56618..99c9369a2b9 100644
--- a/spec/lib/gitlab/git_access_wiki_spec.rb
+++ b/spec/lib/gitlab/git_access_wiki_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitAccessWiki do
diff --git a/spec/lib/gitlab/git_ref_validator_spec.rb b/spec/lib/gitlab/git_ref_validator_spec.rb
index b63389af29f..1531317c514 100644
--- a/spec/lib/gitlab/git_ref_validator_spec.rb
+++ b/spec/lib/gitlab/git_ref_validator_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitRefValidator do
diff --git a/spec/lib/gitlab/git_spec.rb b/spec/lib/gitlab/git_spec.rb
index 505bc470644..fbc49e05c37 100644
--- a/spec/lib/gitlab/git_spec.rb
+++ b/spec/lib/gitlab/git_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Git do
diff --git a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
index a2770ef2fe4..887a6baf659 100644
--- a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::BlobService do
diff --git a/spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb
index 742b2872c40..e88b86c71f2 100644
--- a/spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::BlobsStitcher do
diff --git a/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb b/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb
index c42332dc27b..c6c7fa1c38a 100644
--- a/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::CleanupService do
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 71489adb373..1abdabe17bb 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::CommitService do
diff --git a/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb
index a3602463756..db734b1c129 100644
--- a/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::ConflictFilesStitcher do
diff --git a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
index 52630ba0223..f19bcae2470 100644
--- a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::ConflictsService do
diff --git a/spec/lib/gitlab/gitaly_client/diff_spec.rb b/spec/lib/gitlab/gitaly_client/diff_spec.rb
index ec7ab2fdedb..d86497da7f5 100644
--- a/spec/lib/gitlab/gitaly_client/diff_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/diff_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::Diff do
diff --git a/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb
index cd3242b9326..c9d42ad32cf 100644
--- a/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::DiffStitcher do
diff --git a/spec/lib/gitlab/gitaly_client/health_check_service_spec.rb b/spec/lib/gitlab/gitaly_client/health_check_service_spec.rb
index 2c7e5eb5787..615bc80fff2 100644
--- a/spec/lib/gitlab/gitaly_client/health_check_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/health_check_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::HealthCheckService do
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index f38b8d31237..d4337c51279 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::OperationService do
@@ -209,10 +211,12 @@ describe Gitlab::GitalyClient::OperationService do
end
context 'when a create_tree_error is present' do
- let(:response) { response_class.new(create_tree_error: "something failed") }
+ let(:response) { response_class.new(create_tree_error: "something failed", create_tree_error_code: 'EMPTY') }
it 'raises a CreateTreeError' do
- expect { subject }.to raise_error(Gitlab::Git::Repository::CreateTreeError, "something failed")
+ expect { subject }.to raise_error(Gitlab::Git::Repository::CreateTreeError) do |error|
+ expect(error.error_code).to eq(:empty)
+ end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 0bb6e582159..2b4fe2ea5c0 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::RefService do
diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
index d5508dbff5d..929ff5dee5d 100644
--- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::RemoteService do
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index f4b73931f21..503ac57ade6 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::RepositoryService do
diff --git a/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb b/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
index 2f83e5a5221..a6b29489df3 100644
--- a/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::StorageSettings do
diff --git a/spec/lib/gitlab/gitaly_client/util_spec.rb b/spec/lib/gitlab/gitaly_client/util_spec.rb
index 78a5e195ad1..f31b7c349ff 100644
--- a/spec/lib/gitlab/gitaly_client/util_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/util_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::Util do
diff --git a/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
index 4fa8e97aca0..cb04f9a1637 100644
--- a/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GitalyClient::WikiService do
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index b8df9ad642a..b6c0c0ad523 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
# We stub Gitaly in `spec/support/gitaly.rb` for other tests. We don't want
@@ -399,6 +401,8 @@ describe Gitlab::GitalyClient do
context 'when the request store is active', :request_store do
it 'records call details if a RPC is called' do
+ expect(described_class).to receive(:measure_timings).and_call_original
+
gitaly_server.server_version
expect(described_class.list_call_details).not_to be_empty
diff --git a/spec/lib/gitlab/github_import/bulk_importing_spec.rb b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
index 91229d9c7d4..3266ec4ab50 100644
--- a/spec/lib/gitlab/github_import/bulk_importing_spec.rb
+++ b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::BulkImporting do
diff --git a/spec/lib/gitlab/github_import/caching_spec.rb b/spec/lib/gitlab/github_import/caching_spec.rb
index 70ecdc16da1..18c3e382532 100644
--- a/spec/lib/gitlab/github_import/caching_spec.rb
+++ b/spec/lib/gitlab/github_import/caching_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Caching, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 5b2642d9473..3b269d64b07 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Client do
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index 1568c657a1e..484458289af 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::DiffNoteImporter do
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
index 4713c6795bb..23ed21294e3 100644
--- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::DiffNotesImporter do
diff --git a/spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb
index 665b31ef244..399e2d9a563 100644
--- a/spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::IssueAndLabelLinksImporter do
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
index dab5767ece1..a003ad7e091 100644
--- a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
index e237e79e94b..8920ef9fedb 100644
--- a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::IssuesImporter do
diff --git a/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
index e2a71e78574..19d40b2f380 100644
--- a/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::LabelLinksImporter do
diff --git a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
index 156ef96a0fa..2dcf1433154 100644
--- a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/github_import/importer/lfs_object_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_object_importer_spec.rb
index 8fd328d9c1e..a02b620f131 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_object_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_object_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::LfsObjectImporter do
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index 50442552eee..bec039a48eb 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::LfsObjectsImporter do
diff --git a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
index 120a07ff2b3..eaf63e0e11b 100644
--- a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
index 9bdcc42be19..d2b8ba186c8 100644
--- a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::NoteImporter do
diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
index f046d13f879..128f8f95fa0 100644
--- a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::NotesImporter do
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
index 8331f0b6bc7..50c27e7f4b7 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index c51985f00a2..e2d810d5ddc 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::PullRequestsImporter do
diff --git a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
index 6a31c57a73d..f8d53208619 100644
--- a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::ReleasesImporter do
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
index 705df1f4fe7..c65b28fafbf 100644
--- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Importer::RepositoryImporter do
diff --git a/spec/lib/gitlab/github_import/issuable_finder_spec.rb b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
index da69911812a..b8a6feb6c73 100644
--- a/spec/lib/gitlab/github_import/issuable_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/github_import/label_finder_spec.rb b/spec/lib/gitlab/github_import/label_finder_spec.rb
index 8ba766944d6..039ae27ad57 100644
--- a/spec/lib/gitlab/github_import/label_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/label_finder_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::LabelFinder, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
index 1ff5b9d66b3..a1216db7aac 100644
--- a/spec/lib/gitlab/github_import/markdown_text_spec.rb
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::MarkdownText do
diff --git a/spec/lib/gitlab/github_import/milestone_finder_spec.rb b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
index dff931a2fe8..407e2e67ec9 100644
--- a/spec/lib/gitlab/github_import/milestone_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::MilestoneFinder, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/github_import/page_counter_spec.rb b/spec/lib/gitlab/github_import/page_counter_spec.rb
index c2613a9a415..87f3ce45fd3 100644
--- a/spec/lib/gitlab/github_import/page_counter_spec.rb
+++ b/spec/lib/gitlab/github_import/page_counter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::PageCounter, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/github_import/parallel_importer_spec.rb b/spec/lib/gitlab/github_import/parallel_importer_spec.rb
index ecab64a372a..a9b7d3d388c 100644
--- a/spec/lib/gitlab/github_import/parallel_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::ParallelImporter do
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index 98205d3ee25..f4d107e3dce 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::ParallelScheduling do
diff --git a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
index 7b0a1ea4948..e743a87cdd1 100644
--- a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Representation::DiffNote do
diff --git a/spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb b/spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb
index 15de0fe49ff..e3b48df4ae9 100644
--- a/spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Representation::ExposeAttribute do
diff --git a/spec/lib/gitlab/github_import/representation/issue_spec.rb b/spec/lib/gitlab/github_import/representation/issue_spec.rb
index 99330ce42cb..741a912e53b 100644
--- a/spec/lib/gitlab/github_import/representation/issue_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/issue_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Representation::Issue do
diff --git a/spec/lib/gitlab/github_import/representation/note_spec.rb b/spec/lib/gitlab/github_import/representation/note_spec.rb
index f2c1c66b357..a171a38bc9e 100644
--- a/spec/lib/gitlab/github_import/representation/note_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/note_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Representation::Note do
diff --git a/spec/lib/gitlab/github_import/representation/pull_request_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
index d478e5ae899..b6dcd098c9c 100644
--- a/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Representation::PullRequest do
diff --git a/spec/lib/gitlab/github_import/representation/to_hash_spec.rb b/spec/lib/gitlab/github_import/representation/to_hash_spec.rb
index c296aa0a45b..9c47349b376 100644
--- a/spec/lib/gitlab/github_import/representation/to_hash_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/to_hash_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Representation::ToHash do
diff --git a/spec/lib/gitlab/github_import/representation/user_spec.rb b/spec/lib/gitlab/github_import/representation/user_spec.rb
index 4e63e8ea568..a7ad6bda3ad 100644
--- a/spec/lib/gitlab/github_import/representation/user_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/user_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Representation::User do
diff --git a/spec/lib/gitlab/github_import/representation_spec.rb b/spec/lib/gitlab/github_import/representation_spec.rb
index 0b0610817b0..76753a0ff21 100644
--- a/spec/lib/gitlab/github_import/representation_spec.rb
+++ b/spec/lib/gitlab/github_import/representation_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::Representation do
diff --git a/spec/lib/gitlab/github_import/sequential_importer_spec.rb b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
index 05d3243f806..8b1e8fbf3b7 100644
--- a/spec/lib/gitlab/github_import/sequential_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::SequentialImporter do
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index 29f4c00d9c7..74b5c1c52cd 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
index 496244c91bf..c3ddac01c87 100644
--- a/spec/lib/gitlab/github_import_spec.rb
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GithubImport do
diff --git a/spec/lib/gitlab/gl_repository_spec.rb b/spec/lib/gitlab/gl_repository_spec.rb
index d4b6c629659..3290bef8aa5 100644
--- a/spec/lib/gitlab/gl_repository_spec.rb
+++ b/spec/lib/gitlab/gl_repository_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe ::Gitlab::GlRepository do
diff --git a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
index 1dfca0b056c..da307754243 100644
--- a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
+++ b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
verification_status: 'verified'
end
- it 'assigns the gpg key to the signature when the missing gpg key is added' do
+ it 'assigns the gpg key to the signature when the missing gpg key is added', :sidekiq_might_not_need_inline do
# InvalidGpgSignatureUpdater is called by the after_create hook
gpg_key = create :gpg_key,
key: GpgHelpers::User1.public_key,
@@ -86,7 +86,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
verification_status: 'unknown_key'
end
- it 'updates the signature to being valid when the missing gpg key is added' do
+ it 'updates the signature to being valid when the missing gpg key is added', :sidekiq_might_not_need_inline do
# InvalidGpgSignatureUpdater is called by the after_create hook
gpg_key = create :gpg_key,
key: GpgHelpers::User1.public_key,
@@ -133,7 +133,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
verification_status: 'unknown_key'
end
- it 'updates the signature to being valid when the user updates the email address' do
+ it 'updates the signature to being valid when the user updates the email address', :sidekiq_might_not_need_inline do
gpg_key = create :gpg_key,
key: GpgHelpers::User1.public_key,
user: user
@@ -152,7 +152,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
)
end
- it 'keeps the signature at being invalid when the changed email address is still unrelated' do
+ it 'keeps the signature at being invalid when the changed email address is still unrelated', :sidekiq_might_not_need_inline do
gpg_key = create :gpg_key,
key: GpgHelpers::User1.public_key,
user: user
@@ -192,7 +192,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
verification_status: 'unknown_key'
end
- it 'updates the signature to being valid when the missing gpg key is added' do
+ it 'updates the signature to being valid when the missing gpg key is added', :sidekiq_might_not_need_inline do
# InvalidGpgSignatureUpdater is called by the after_create hook
gpg_key = create(:gpg_key, key: GpgHelpers::User3.public_key, user: user)
subkey = gpg_key.subkeys.last
diff --git a/spec/lib/gitlab/gpg_spec.rb b/spec/lib/gitlab/gpg_spec.rb
index 77d318c9b23..52d6a86f7d0 100644
--- a/spec/lib/gitlab/gpg_spec.rb
+++ b/spec/lib/gitlab/gpg_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Gpg do
@@ -63,7 +65,7 @@ describe Gitlab::Gpg do
it 'downcases the email' do
public_key = double(:key)
fingerprints = double(:fingerprints)
- uid = double(:uid, name: 'Nannie Bernhard', email: 'NANNIE.BERNHARD@EXAMPLE.COM')
+ uid = double(:uid, name: +'Nannie Bernhard', email: +'NANNIE.BERNHARD@EXAMPLE.COM')
raw_key = double(:raw_key, uids: [uid])
allow(Gitlab::Gpg::CurrentKeyChain).to receive(:fingerprints_from_key).with(public_key).and_return(fingerprints)
allow(GPGME::Key).to receive(:find).with(:public, anything).and_return([raw_key])
@@ -78,8 +80,8 @@ describe Gitlab::Gpg do
it 'rejects non UTF-8 names and addresses' do
public_key = double(:key)
fingerprints = double(:fingerprints)
- email = "\xEEch@test.com".force_encoding('ASCII-8BIT')
- uid = double(:uid, name: 'Test User', email: email)
+ email = (+"\xEEch@test.com").force_encoding('ASCII-8BIT')
+ uid = double(:uid, name: +'Test User', email: email)
raw_key = double(:raw_key, uids: [uid])
allow(Gitlab::Gpg::CurrentKeyChain).to receive(:fingerprints_from_key).with(public_key).and_return(fingerprints)
allow(GPGME::Key).to receive(:find).with(:public, anything).and_return([raw_key])
@@ -139,6 +141,96 @@ describe Gitlab::Gpg do
end
end.not_to raise_error
end
+
+ it 'keeps track of created and removed keychains in counters' do
+ created = Gitlab::Metrics.counter(:gpg_tmp_keychains_created_total, 'The number of temporary GPG keychains')
+ removed = Gitlab::Metrics.counter(:gpg_tmp_keychains_removed_total, 'The number of temporary GPG keychains')
+
+ initial_created = created.get
+ initial_removed = removed.get
+
+ described_class.using_tmp_keychain do
+ expect(created.get).to eq(initial_created + 1)
+ expect(removed.get).to eq(initial_removed)
+ end
+
+ expect(removed.get).to eq(initial_removed + 1)
+ end
+
+ it 'cleans up the tmp directory after finishing' do
+ tmp_directory = nil
+
+ described_class.using_tmp_keychain do
+ tmp_directory = described_class.current_home_dir
+ expect(File.exist?(tmp_directory)).to be true
+ end
+
+ expect(tmp_directory).not_to be_nil
+ expect(File.exist?(tmp_directory)).to be false
+ end
+
+ it 'does not fail if the homedir was deleted while running' do
+ expect do
+ described_class.using_tmp_keychain do
+ FileUtils.remove_entry(described_class.current_home_dir)
+ end
+ end.not_to raise_error
+ end
+
+ shared_examples 'multiple deletion attempts of the tmp-dir' do |seconds|
+ let(:tmp_dir) do
+ tmp_dir = Dir.mktmpdir
+ allow(Dir).to receive(:mktmpdir).and_return(tmp_dir)
+ tmp_dir
+ end
+
+ before do
+ # Stub all the other calls for `remove_entry`
+ allow(FileUtils).to receive(:remove_entry).with(any_args).and_call_original
+ end
+
+ it "tries for #{seconds}" do
+ expect(Retriable).to receive(:retriable).with(a_hash_including(max_elapsed_time: seconds))
+
+ described_class.using_tmp_keychain {}
+ end
+
+ it 'tries at least 2 times to remove the tmp dir before raising', :aggregate_failures do
+ expect(Retriable).to receive(:sleep).at_least(2).times
+ expect(FileUtils).to receive(:remove_entry).with(tmp_dir).at_least(2).times.and_raise('Deletion failed')
+
+ expect { described_class.using_tmp_keychain { } }.to raise_error(described_class::CleanupError)
+ end
+
+ it 'does not attempt multiple times when the deletion succeeds' do
+ expect(Retriable).to receive(:sleep).once
+ expect(FileUtils).to receive(:remove_entry).with(tmp_dir).once.and_raise('Deletion failed')
+ expect(FileUtils).to receive(:remove_entry).with(tmp_dir).and_call_original
+
+ expect { described_class.using_tmp_keychain { } }.not_to raise_error
+
+ expect(File.exist?(tmp_dir)).to be false
+ end
+
+ it 'does not retry when the feature flag is disabled' do
+ stub_feature_flags(gpg_cleanup_retries: false)
+
+ expect(FileUtils).to receive(:remove_entry).with(tmp_dir, true).and_call_original
+ expect(Retriable).not_to receive(:retriable)
+
+ described_class.using_tmp_keychain {}
+ end
+ end
+
+ it_behaves_like 'multiple deletion attempts of the tmp-dir', described_class::FG_CLEANUP_RUNTIME_S
+
+ context 'when running in Sidekiq' do
+ before do
+ allow(Sidekiq).to receive(:server?).and_return(true)
+ end
+
+ it_behaves_like 'multiple deletion attempts of the tmp-dir', described_class::BG_CLEANUP_RUNTIME_S
+ end
end
end
diff --git a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
new file mode 100644
index 00000000000..8d7826c0a56
--- /dev/null
+++ b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::GrapeLogging::Loggers::ExceptionLogger do
+ subject { described_class.new }
+
+ let(:mock_request) { OpenStruct.new(env: {}) }
+
+ describe ".parameters" do
+ describe 'when no exception is available' do
+ it 'returns an empty hash' do
+ expect(subject.parameters(mock_request, nil)).to eq({})
+ end
+ end
+
+ describe 'when an exception is available' do
+ let(:exception) { RuntimeError.new('This is a test') }
+ let(:mock_request) do
+ OpenStruct.new(
+ env: {
+ ::API::Helpers::API_EXCEPTION_ENV => exception
+ }
+ )
+ end
+
+ let(:expected) do
+ {
+ exception: {
+ class: 'RuntimeError',
+ message: 'This is a test'
+ }
+ }
+ end
+
+ it 'returns the correct fields' do
+ expect(subject.parameters(mock_request, nil)).to eq(expected)
+ end
+
+ context 'with backtrace' do
+ before do
+ current_backtrace = caller
+ allow(exception).to receive(:backtrace).and_return(current_backtrace)
+ expected[:exception][:backtrace] = Gitlab::Profiler.clean_backtrace(current_backtrace)
+ end
+
+ it 'includes the backtrace' do
+ expect(subject.parameters(mock_request, nil)).to eq(expected)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb b/spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb
new file mode 100644
index 00000000000..1fda84f777e
--- /dev/null
+++ b/spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::Connections::FilterableArrayConnection do
+ let(:callback) { proc { |nodes| nodes } }
+ let(:all_nodes) { Gitlab::Graphql::FilterableArray.new(callback, 1, 2, 3, 4, 5) }
+ let(:arguments) { {} }
+ subject(:connection) do
+ described_class.new(all_nodes, arguments, max_page_size: 3)
+ end
+
+ describe '#paged_nodes' do
+ let(:paged_nodes) { subject.paged_nodes }
+
+ it_behaves_like "connection with paged nodes"
+
+ context 'when callback filters some nodes' do
+ let(:callback) { proc { |nodes| nodes[1..-1] } }
+
+ it 'does not return filtered elements' do
+ expect(subject.paged_nodes).to contain_exactly(all_nodes[1], all_nodes[2])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
new file mode 100644
index 00000000000..d943540fe1f
--- /dev/null
+++ b/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do
+ describe '#build' do
+ let(:condition) { described_class.new(Issue.arel_table, %w(relative_position id), [1500, 500], ['>', '>'], before_or_after) }
+
+ context 'when there is only one ordering field' do
+ let(:condition) { described_class.new(Issue.arel_table, ['id'], [500], ['>'], :after) }
+
+ it 'generates a single condition sql' do
+ expected_sql = <<~SQL
+ ("issues"."id" > 500)
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :after' do
+ let(:before_or_after) { :after }
+
+ it 'generates :after sql' do
+ expected_sql = <<~SQL
+ ("issues"."relative_position" > 1500)
+ OR (
+ "issues"."relative_position" = 1500
+ AND
+ "issues"."id" > 500
+ )
+ OR ("issues"."relative_position" IS NULL)
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates :before sql' do
+ expected_sql = <<~SQL
+ ("issues"."relative_position" > 1500)
+ OR (
+ "issues"."relative_position" = 1500
+ AND
+ "issues"."id" > 500
+ )
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
new file mode 100644
index 00000000000..7fce94adb81
--- /dev/null
+++ b/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::Connections::Keyset::Conditions::NullCondition do
+ describe '#build' do
+ let(:condition) { described_class.new(Issue.arel_table, %w(relative_position id), [nil, 500], [nil, '>'], before_or_after) }
+
+ context 'when :after' do
+ let(:before_or_after) { :after }
+
+ it 'generates sql' do
+ expected_sql = <<~SQL
+ (
+ "issues"."relative_position" IS NULL
+ AND
+ "issues"."id" > 500
+ )
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates :before sql' do
+ expected_sql = <<~SQL
+ (
+ "issues"."relative_position" IS NULL
+ AND
+ "issues"."id" > 500
+ )
+ OR ("issues"."relative_position" IS NOT NULL)
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb
new file mode 100644
index 00000000000..9dda2a41ec6
--- /dev/null
+++ b/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb
@@ -0,0 +1,281 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::Connections::Keyset::Connection do
+ let(:nodes) { Project.all.order(id: :asc) }
+ let(:arguments) { {} }
+ subject(:connection) do
+ described_class.new(nodes, arguments, max_page_size: 3)
+ end
+
+ def encoded_cursor(node)
+ described_class.new(nodes, {}).cursor_from_node(node)
+ end
+
+ def decoded_cursor(cursor)
+ JSON.parse(Base64Bp.urlsafe_decode64(cursor))
+ end
+
+ describe '#cursor_from_nodes' do
+ let(:project) { create(:project) }
+ let(:cursor) { connection.cursor_from_node(project) }
+
+ it 'returns an encoded ID' do
+ expect(decoded_cursor(cursor)).to eq('id' => project.id.to_s)
+ end
+
+ context 'when an order is specified' do
+ let(:nodes) { Project.order(:updated_at) }
+
+ it 'returns the encoded value of the order' do
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s)
+ end
+
+ it 'includes the :id even when not specified in the order' do
+ expect(decoded_cursor(cursor)).to include('id' => project.id.to_s)
+ end
+ end
+
+ context 'when multiple orders are specified' do
+ let(:nodes) { Project.order(:updated_at).order(:created_at) }
+
+ it 'returns the encoded value of the order' do
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s)
+ end
+ end
+
+ context 'when multiple orders with SQL are specified' do
+ let(:nodes) { Project.order(Arel.sql('projects.updated_at IS NULL')).order(:updated_at).order(:id) }
+
+ it 'returns the encoded value of the order' do
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s)
+ end
+ end
+ end
+
+ describe '#sliced_nodes' do
+ let(:projects) { create_list(:project, 4) }
+
+ context 'when before is passed' do
+ let(:arguments) { { before: encoded_cursor(projects[1]) } }
+
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(id: :desc) }
+
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ end
+ end
+ end
+
+ context 'when after is passed' do
+ let(:arguments) { { after: encoded_cursor(projects[1]) } }
+
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ end
+
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(id: :desc) }
+
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+ end
+ end
+
+ context 'when both before and after are passed' do
+ let(:arguments) do
+ {
+ after: encoded_cursor(projects[1]),
+ before: encoded_cursor(projects[3])
+ }
+ end
+
+ it 'returns the expected set' do
+ expect(subject.sliced_nodes).to contain_exactly(projects[2])
+ end
+ end
+
+ context 'when multiple orders are defined' do
+ let!(:project1) { create(:project, last_repository_check_at: 10.days.ago) } # Asc: project5 Desc: project3
+ let!(:project2) { create(:project, last_repository_check_at: nil) } # Asc: project1 Desc: project1
+ let!(:project3) { create(:project, last_repository_check_at: 5.days.ago) } # Asc: project3 Desc: project5
+ let!(:project4) { create(:project, last_repository_check_at: nil) } # Asc: project2 Desc: project2
+ let!(:project5) { create(:project, last_repository_check_at: 20.days.ago) } # Asc: project4 Desc: project4
+
+ context 'when ascending' do
+ let(:nodes) do
+ Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :asc).order(id: :asc)
+ end
+
+ context 'when no cursor is passed' do
+ let(:arguments) { {} }
+
+ it 'returns projects in ascending order' do
+ expect(subject.sliced_nodes).to eq([project5, project1, project3, project2, project4])
+ end
+ end
+
+ context 'when before cursor value is NULL' do
+ let(:arguments) { { before: encoded_cursor(project4) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq([project5, project1, project3, project2])
+ end
+ end
+
+ context 'when before cursor value is not NULL' do
+ let(:arguments) { { before: encoded_cursor(project3) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq([project5, project1])
+ end
+ end
+
+ context 'when after cursor value is NULL' do
+ let(:arguments) { { after: encoded_cursor(project2) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project4])
+ end
+ end
+
+ context 'when after cursor value is not NULL' do
+ let(:arguments) { { after: encoded_cursor(project1) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project3, project2, project4])
+ end
+ end
+
+ context 'when before and after cursor' do
+ let(:arguments) { { before: encoded_cursor(project4), after: encoded_cursor(project5) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project1, project3, project2])
+ end
+ end
+ end
+
+ context 'when descending' do
+ let(:nodes) do
+ Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :desc).order(id: :asc)
+ end
+
+ context 'when no cursor is passed' do
+ let(:arguments) { {} }
+
+ it 'only returns projects in descending order' do
+ expect(subject.sliced_nodes).to eq([project3, project1, project5, project2, project4])
+ end
+ end
+
+ context 'when before cursor value is NULL' do
+ let(:arguments) { { before: encoded_cursor(project4) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq([project3, project1, project5, project2])
+ end
+ end
+
+ context 'when before cursor value is not NULL' do
+ let(:arguments) { { before: encoded_cursor(project5) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq([project3, project1])
+ end
+ end
+
+ context 'when after cursor value is NULL' do
+ let(:arguments) { { after: encoded_cursor(project2) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project4])
+ end
+ end
+
+ context 'when after cursor value is not NULL' do
+ let(:arguments) { { after: encoded_cursor(project1) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project5, project2, project4])
+ end
+ end
+
+ context 'when before and after cursor' do
+ let(:arguments) { { before: encoded_cursor(project4), after: encoded_cursor(project3) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project1, project5, project2])
+ end
+ end
+ end
+ end
+
+ # TODO Enable this as part of below issue
+ # https://gitlab.com/gitlab-org/gitlab/issues/32933
+ # context 'when an invalid cursor is provided' do
+ # let(:arguments) { { before: 'invalidcursor' } }
+ #
+ # it 'raises an error' do
+ # expect { expect(subject.sliced_nodes) }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ # end
+ # end
+
+ # TODO Remove this as part of below issue
+ # https://gitlab.com/gitlab-org/gitlab/issues/32933
+ context 'when an old style cursor is provided' do
+ let(:arguments) { { before: Base64Bp.urlsafe_encode64(projects[1].id.to_s, padding: false) } }
+
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+ end
+ end
+
+ describe '#paged_nodes' do
+ let_it_be(:all_nodes) { create_list(:project, 5) }
+ let(:paged_nodes) { subject.paged_nodes }
+
+ it_behaves_like "connection with paged nodes"
+
+ context 'when both are passed' do
+ let(:arguments) { { first: 2, last: 2 } }
+
+ it 'raises an error' do
+ expect { paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+ end
+
+ context 'when primary key is not in original order' do
+ let(:nodes) { Project.order(last_repository_check_at: :desc) }
+
+ it 'is added to end' do
+ sliced = subject.sliced_nodes
+ last_order_name = sliced.order_values.last.expr.name
+
+ expect(last_order_name).to eq sliced.primary_key
+ end
+ end
+
+ context 'when there is no primary key' do
+ let(:nodes) { NoPrimaryKey.all }
+
+ it 'raises an error' do
+ expect(NoPrimaryKey.primary_key).to be_nil
+ expect { subject.sliced_nodes }.to raise_error(ArgumentError, 'Relation must have a primary key')
+ end
+ end
+ end
+
+ class NoPrimaryKey < ActiveRecord::Base
+ self.table_name = 'no_primary_key'
+ self.primary_key = nil
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/legacy_keyset_connection_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/legacy_keyset_connection_spec.rb
new file mode 100644
index 00000000000..aaf28fed684
--- /dev/null
+++ b/spec/lib/gitlab/graphql/connections/keyset/legacy_keyset_connection_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+# TODO https://gitlab.com/gitlab-org/gitlab/issues/35104
+require 'spec_helper'
+
+describe Gitlab::Graphql::Connections::Keyset::LegacyKeysetConnection do
+ describe 'old keyset_connection' do
+ let(:described_class) { Gitlab::Graphql::Connections::Keyset::Connection }
+ let(:nodes) { Project.all.order(id: :asc) }
+ let(:arguments) { {} }
+ subject(:connection) do
+ described_class.new(nodes, arguments, max_page_size: 3)
+ end
+
+ before do
+ stub_feature_flags(graphql_keyset_pagination: false)
+ end
+
+ def encoded_property(value)
+ Base64Bp.urlsafe_encode64(value.to_s, padding: false)
+ end
+
+ describe '#cursor_from_nodes' do
+ let(:project) { create(:project) }
+
+ it 'returns an encoded ID' do
+ expect(connection.cursor_from_node(project))
+ .to eq(encoded_property(project.id))
+ end
+
+ context 'when an order was specified' do
+ let(:nodes) { Project.order(:updated_at) }
+
+ it 'returns the encoded value of the order' do
+ expect(connection.cursor_from_node(project))
+ .to eq(encoded_property(project.updated_at))
+ end
+ end
+ end
+
+ describe '#sliced_nodes' do
+ let(:projects) { create_list(:project, 4) }
+
+ context 'when before is passed' do
+ let(:arguments) { { before: encoded_property(projects[1].id) } }
+
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(id: :desc) }
+
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ end
+ end
+ end
+
+ context 'when after is passed' do
+ let(:arguments) { { after: encoded_property(projects[1].id) } }
+
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ end
+
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(id: :desc) }
+
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+ end
+ end
+
+ context 'when both before and after are passed' do
+ let(:arguments) do
+ {
+ after: encoded_property(projects[1].id),
+ before: encoded_property(projects[3].id)
+ }
+ end
+
+ it 'returns the expected set' do
+ expect(subject.sliced_nodes).to contain_exactly(projects[2])
+ end
+ end
+ end
+
+ describe '#paged_nodes' do
+ let!(:projects) { create_list(:project, 5) }
+
+ it 'returns the collection limited to max page size' do
+ expect(subject.paged_nodes.size).to eq(3)
+ end
+
+ it 'is a loaded memoized array' do
+ expect(subject.paged_nodes).to be_an(Array)
+ expect(subject.paged_nodes.object_id).to eq(subject.paged_nodes.object_id)
+ end
+
+ context 'when `first` is passed' do
+ let(:arguments) { { first: 2 } }
+
+ it 'returns only the first elements' do
+ expect(subject.paged_nodes).to contain_exactly(projects.first, projects.second)
+ end
+ end
+
+ context 'when `last` is passed' do
+ let(:arguments) { { last: 2 } }
+
+ it 'returns only the last elements' do
+ expect(subject.paged_nodes).to contain_exactly(projects[3], projects[4])
+ end
+ end
+
+ context 'when both are passed' do
+ let(:arguments) { { first: 2, last: 2 } }
+
+ it 'raises an error' do
+ expect { subject.paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
new file mode 100644
index 00000000000..17ddcaefeeb
--- /dev/null
+++ b/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::Connections::Keyset::OrderInfo do
+ describe '#build_order_list' do
+ let(:order_list) { described_class.build_order_list(relation) }
+
+ context 'when multiple orders with SQL is specified' do
+ let(:relation) { Project.order(Arel.sql('projects.updated_at IS NULL')).order(:updated_at).order(:id) }
+
+ it 'ignores the SQL order' do
+ expect(order_list.count).to eq 2
+ expect(order_list.first.attribute_name).to eq 'updated_at'
+ expect(order_list.first.operator_for(:after)).to eq '>'
+ expect(order_list.last.attribute_name).to eq 'id'
+ expect(order_list.last.operator_for(:after)).to eq '>'
+ end
+ end
+
+ context 'when order contains NULLS LAST' do
+ let(:relation) { Project.order(Arel.sql('projects.updated_at Asc Nulls Last')).order(:id) }
+
+ it 'does not ignore the SQL order' do
+ expect(order_list.count).to eq 2
+ expect(order_list.first.attribute_name).to eq 'projects.updated_at'
+ expect(order_list.first.operator_for(:after)).to eq '>'
+ expect(order_list.last.attribute_name).to eq 'id'
+ expect(order_list.last.operator_for(:after)).to eq '>'
+ end
+ end
+
+    context 'when order contains invalidly formatted NULLS LAST' do
+ let(:relation) { Project.order(Arel.sql('projects.updated_at created_at Asc Nulls Last')).order(:id) }
+
+ it 'ignores the SQL order' do
+ expect(order_list.count).to eq 1
+ end
+ end
+ end
+
+ describe '#validate_ordering' do
+ let(:order_list) { described_class.build_order_list(relation) }
+
+ context 'when number of ordering fields is 0' do
+ let(:relation) { Project.all }
+
+ it 'raises an error' do
+ expect { described_class.validate_ordering(relation, order_list) }
+ .to raise_error(ArgumentError, 'A minimum of 1 ordering field is required')
+ end
+ end
+
+ context 'when number of ordering fields is over 2' do
+ let(:relation) { Project.order(last_repository_check_at: :desc).order(updated_at: :desc).order(:id) }
+
+ it 'raises an error' do
+ expect { described_class.validate_ordering(relation, order_list) }
+ .to raise_error(ArgumentError, 'A maximum of 2 ordering fields are allowed')
+ end
+ end
+
+ context 'when the second (or first) column is nullable' do
+ let(:relation) { Project.order(last_repository_check_at: :desc).order(updated_at: :desc) }
+
+ it 'raises an error' do
+ expect { described_class.validate_ordering(relation, order_list) }
+ .to raise_error(ArgumentError, "Column `updated_at` must not allow NULL")
+ end
+ end
+
+ context 'for last ordering field' do
+ let(:relation) { Project.order(namespace_id: :desc) }
+
+ it 'raises error if primary key is not last field' do
+ expect { described_class.validate_ordering(relation, order_list) }
+ .to raise_error(ArgumentError, "Last ordering field must be the primary key, `#{relation.primary_key}`")
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
new file mode 100644
index 00000000000..59e153d9e07
--- /dev/null
+++ b/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::Connections::Keyset::QueryBuilder do
+ context 'when number of ordering fields is 0' do
+ it 'raises an error' do
+ expect { described_class.new(Issue.arel_table, [], {}, :after) }
+ .to raise_error(ArgumentError, 'No ordering scopes have been supplied')
+ end
+ end
+
+ describe '#conditions' do
+ let(:relation) { Issue.order(relative_position: :desc).order(:id) }
+ let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) }
+ let(:builder) { described_class.new(arel_table, order_list, decoded_cursor, before_or_after) }
+ let(:before_or_after) { :after }
+
+ context 'when only a single ordering' do
+ let(:relation) { Issue.order(id: :desc) }
+
+ context 'when the value is nil' do
+ let(:decoded_cursor) { { 'id' => nil } }
+
+ it 'raises an error' do
+ expect { builder.conditions }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'Before/after cursor invalid: `nil` was provided as only sortable value')
+ end
+ end
+
+ context 'when value is not nil' do
+ let(:decoded_cursor) { { 'id' => 100 } }
+ let(:conditions) { builder.conditions }
+
+ context 'when :after' do
+ it 'generates the correct condition' do
+ expect(conditions.strip).to eq '("issues"."id" < 100)'
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates the correct condition' do
+ expect(conditions.strip).to eq '("issues"."id" > 100)'
+ end
+ end
+ end
+ end
+
+ context 'when two orderings' do
+ let(:decoded_cursor) { { 'relative_position' => 1500, 'id' => 100 } }
+
+ context 'when no values are nil' do
+ context 'when :after' do
+ it 'generates the correct condition' do
+ conditions = builder.conditions
+
+ expect(conditions).to include '"issues"."relative_position" < 1500'
+ expect(conditions).to include '"issues"."id" > 100'
+ expect(conditions).to include 'OR ("issues"."relative_position" IS NULL)'
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates the correct condition' do
+ conditions = builder.conditions
+
+ expect(conditions).to include '("issues"."relative_position" > 1500)'
+ expect(conditions).to include '"issues"."id" < 100'
+ expect(conditions).to include '"issues"."relative_position" = 1500'
+ end
+ end
+ end
+
+ context 'when first value is nil' do
+ let(:decoded_cursor) { { 'relative_position' => nil, 'id' => 100 } }
+
+ context 'when :after' do
+ it 'generates the correct condition' do
+ conditions = builder.conditions
+
+ expect(conditions).to include '"issues"."relative_position" IS NULL'
+ expect(conditions).to include '"issues"."id" > 100'
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates the correct condition' do
+ conditions = builder.conditions
+
+ expect(conditions).to include '"issues"."relative_position" IS NULL'
+ expect(conditions).to include '"issues"."id" < 100'
+ expect(conditions).to include 'OR ("issues"."relative_position" IS NOT NULL)'
+ end
+ end
+ end
+ end
+ end
+
+ def arel_table
+ Issue.arel_table
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/keyset_connection_spec.rb b/spec/lib/gitlab/graphql/connections/keyset_connection_spec.rb
deleted file mode 100644
index 4eb121794e1..00000000000
--- a/spec/lib/gitlab/graphql/connections/keyset_connection_spec.rb
+++ /dev/null
@@ -1,117 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::Graphql::Connections::KeysetConnection do
- let(:nodes) { Project.all.order(id: :asc) }
- let(:arguments) { {} }
- subject(:connection) do
- described_class.new(nodes, arguments, max_page_size: 3)
- end
-
- def encoded_property(value)
- Base64Bp.urlsafe_encode64(value.to_s, padding: false)
- end
-
- describe '#cursor_from_nodes' do
- let(:project) { create(:project) }
-
- it 'returns an encoded ID' do
- expect(connection.cursor_from_node(project))
- .to eq(encoded_property(project.id))
- end
-
- context 'when an order was specified' do
- let(:nodes) { Project.order(:updated_at) }
-
- it 'returns the encoded value of the order' do
- expect(connection.cursor_from_node(project))
- .to eq(encoded_property(project.updated_at))
- end
- end
- end
-
- describe '#sliced_nodes' do
- let(:projects) { create_list(:project, 4) }
-
- context 'when before is passed' do
- let(:arguments) { { before: encoded_property(projects[1].id) } }
-
- it 'only returns the project before the selected one' do
- expect(subject.sliced_nodes).to contain_exactly(projects.first)
- end
-
- context 'when the sort order is descending' do
- let(:nodes) { Project.all.order(id: :desc) }
-
- it 'returns the correct nodes' do
- expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
- end
- end
- end
-
- context 'when after is passed' do
- let(:arguments) { { after: encoded_property(projects[1].id) } }
-
- it 'only returns the project before the selected one' do
- expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
- end
-
- context 'when the sort order is descending' do
- let(:nodes) { Project.all.order(id: :desc) }
-
- it 'returns the correct nodes' do
- expect(subject.sliced_nodes).to contain_exactly(projects.first)
- end
- end
- end
-
- context 'when both before and after are passed' do
- let(:arguments) do
- {
- after: encoded_property(projects[1].id),
- before: encoded_property(projects[3].id)
- }
- end
-
- it 'returns the expected set' do
- expect(subject.sliced_nodes).to contain_exactly(projects[2])
- end
- end
- end
-
- describe '#paged_nodes' do
- let!(:projects) { create_list(:project, 5) }
-
- it 'returns the collection limited to max page size' do
- expect(subject.paged_nodes.size).to eq(3)
- end
-
- it 'is a loaded memoized array' do
- expect(subject.paged_nodes).to be_an(Array)
- expect(subject.paged_nodes.object_id).to eq(subject.paged_nodes.object_id)
- end
-
- context 'when `first` is passed' do
- let(:arguments) { { first: 2 } }
-
- it 'returns only the first elements' do
- expect(subject.paged_nodes).to contain_exactly(projects.first, projects.second)
- end
- end
-
- context 'when `last` is passed' do
- let(:arguments) { { last: 2 } }
-
- it 'returns only the last elements' do
- expect(subject.paged_nodes).to contain_exactly(projects[3], projects[4])
- end
- end
-
- context 'when both are passed' do
- let(:arguments) { { first: 2, last: 2 } }
-
- it 'raises an error' do
- expect { subject.paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb b/spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb
deleted file mode 100644
index 136027736c3..00000000000
--- a/spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::Graphql::Loaders::PipelineForShaLoader do
- include GraphqlHelpers
-
- describe '#find_last' do
- it 'batch-resolves latest pipeline' do
- project = create(:project, :repository)
- pipeline1 = create(:ci_pipeline, project: project, ref: project.default_branch, sha: project.commit.sha)
- pipeline2 = create(:ci_pipeline, project: project, ref: project.default_branch, sha: project.commit.sha)
- pipeline3 = create(:ci_pipeline, project: project, ref: 'improve/awesome', sha: project.commit('improve/awesome').sha)
-
- result = batch_sync(max_queries: 1) do
- [pipeline1.sha, pipeline3.sha].map { |sha| described_class.new(project, sha).find_last }
- end
-
- expect(result).to contain_exactly(pipeline2, pipeline3)
- end
- end
-end
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index 53a91a35ec9..570b0cb7401 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::GroupSearchResults do
diff --git a/spec/lib/gitlab/hashed_storage/migrator_spec.rb b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
index 8e253b51597..ce7f2c4530d 100644
--- a/spec/lib/gitlab/hashed_storage/migrator_spec.rb
+++ b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
@@ -42,7 +42,7 @@ describe Gitlab::HashedStorage::Migrator, :sidekiq, :redis do
subject.bulk_migrate(start: ids.min, finish: ids.max)
end
- it 'has all projects migrated and set as writable' do
+ it 'has all projects migrated and set as writable', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
subject.bulk_migrate(start: ids.min, finish: ids.max)
end
@@ -79,7 +79,7 @@ describe Gitlab::HashedStorage::Migrator, :sidekiq, :redis do
subject.bulk_rollback(start: ids.min, finish: ids.max)
end
- it 'has all projects rolledback and set as writable' do
+ it 'has all projects rolledback and set as writable', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
subject.bulk_rollback(start: ids.min, finish: ids.max)
end
@@ -108,7 +108,7 @@ describe Gitlab::HashedStorage::Migrator, :sidekiq, :redis do
expect { subject.migrate(project) }.not_to raise_error
end
- it 'migrates project storage' do
+ it 'migrates project storage', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
subject.migrate(project)
end
@@ -154,7 +154,7 @@ describe Gitlab::HashedStorage::Migrator, :sidekiq, :redis do
expect { subject.rollback(project) }.not_to raise_error
end
- it 'rolls-back project storage' do
+ it 'rolls-back project storage', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
subject.rollback(project)
end
diff --git a/spec/lib/gitlab/health_checks/master_check_spec.rb b/spec/lib/gitlab/health_checks/master_check_spec.rb
new file mode 100644
index 00000000000..91441a7ddc3
--- /dev/null
+++ b/spec/lib/gitlab/health_checks/master_check_spec.rb
@@ -0,0 +1,49 @@
+require 'spec_helper'
+require_relative './simple_check_shared'
+
+describe Gitlab::HealthChecks::MasterCheck do
+ let(:result_class) { Gitlab::HealthChecks::Result }
+
+ SUCCESS_CODE = 100
+ FAILURE_CODE = 101
+
+ before do
+ described_class.register_master
+ end
+
+ after do
+ described_class.finish_master
+ end
+
+ describe '#readiness' do
+ context 'when master is running' do
+ it 'worker does return success' do
+ _, child_status = run_worker
+
+ expect(child_status.exitstatus).to eq(SUCCESS_CODE)
+ end
+ end
+
+ context 'when master finishes early' do
+ before do
+ described_class.send(:close_write)
+ end
+
+ it 'worker does return failure' do
+ _, child_status = run_worker
+
+ expect(child_status.exitstatus).to eq(FAILURE_CODE)
+ end
+ end
+
+ def run_worker
+ pid = fork do
+ described_class.register_worker
+
+ exit(described_class.readiness.success ? SUCCESS_CODE : FAILURE_CODE)
+ end
+
+ Process.wait2(pid)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 4676db6b8d8..5a45d724b83 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Highlight do
diff --git a/spec/lib/gitlab/http_io_spec.rb b/spec/lib/gitlab/http_io_spec.rb
index 788bddb8f59..f30528916dc 100644
--- a/spec/lib/gitlab/http_io_spec.rb
+++ b/spec/lib/gitlab/http_io_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::HttpIO do
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index d3f9be845dd..192816ad057 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::HTTP do
diff --git a/spec/lib/gitlab/i18n_spec.rb b/spec/lib/gitlab/i18n_spec.rb
index 785035d993f..2664423af88 100644
--- a/spec/lib/gitlab/i18n_spec.rb
+++ b/spec/lib/gitlab/i18n_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::I18n do
diff --git a/spec/lib/gitlab/identifier_spec.rb b/spec/lib/gitlab/identifier_spec.rb
index 1e583f4cee2..9c7972d4bde 100644
--- a/spec/lib/gitlab/identifier_spec.rb
+++ b/spec/lib/gitlab/identifier_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Identifier do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 4fd61383c6b..8f627fcc24d 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -29,6 +29,9 @@ issues:
- prometheus_alerts
- prometheus_alert_events
- self_managed_prometheus_alert_events
+- zoom_meetings
+- vulnerability_links
+- related_vulnerabilities
events:
- author
- project
@@ -119,6 +122,7 @@ merge_requests:
- pipelines_for_merge_request
- merge_request_assignees
- suggestions
+- unresolved_notes
- assignees
- reviews
- approval_rules
@@ -338,6 +342,7 @@ project:
- triggers
- pipeline_schedules
- environments
+- environments_for_dashboard
- deployments
- project_feature
- auto_devops
@@ -421,6 +426,12 @@ project:
- pages_metadatum
- alerts_service
- grafana_integration
+- remove_source_branch_after_merge
+- deleting_user
+- upstream_projects
+- downstream_projects
+- upstream_project_subscriptions
+- downstream_project_subscriptions
award_emoji:
- awardable
- user
@@ -528,4 +539,6 @@ versions: &version
- issue
- designs
- actions
+zoom_meetings:
+- issue
design_versions: *version
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index 934e676d020..b190a1007a0 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -132,10 +132,6 @@ describe Gitlab::ImportExport::FastHashSerializer do
end
it 'has no when YML attributes but only the DB column' do
- allow_any_instance_of(Ci::Pipeline)
- .to receive(:ci_yaml_file)
- .and_return(File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')))
-
expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
subject
diff --git a/spec/lib/gitlab/import_export/fork_spec.rb b/spec/lib/gitlab/import_export/fork_spec.rb
index 71fd5a51c3b..5752fd8fa0d 100644
--- a/spec/lib/gitlab/import_export/fork_spec.rb
+++ b/spec/lib/gitlab/import_export/fork_spec.rb
@@ -47,7 +47,7 @@ describe 'forked project import' do
end
end
- it 'can access the MR' do
+ it 'can access the MR', :sidekiq_might_not_need_inline do
project.merge_requests.first.fetch_ref!
expect(project.repository.ref_exists?('refs/merge-requests/1/head')).to be_truthy
diff --git a/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb b/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
index 6a803c48b34..1a5cb7806a3 100644
--- a/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Gitlab::ImportExport::GroupProjectObjectBuilder do
let(:project) do
- create(:project,
+ create(:project, :repository,
:builds_disabled,
:issues_disabled,
name: 'project',
@@ -11,8 +11,8 @@ describe Gitlab::ImportExport::GroupProjectObjectBuilder do
end
context 'labels' do
- it 'finds the right group label' do
- group_label = create(:group_label, 'name': 'group label', 'group': project.group)
+ it 'finds the existing group label' do
+ group_label = create(:group_label, name: 'group label', group: project.group)
expect(described_class.build(Label,
'title' => 'group label',
@@ -31,8 +31,8 @@ describe Gitlab::ImportExport::GroupProjectObjectBuilder do
end
context 'milestones' do
- it 'finds the right group milestone' do
- milestone = create(:milestone, 'name' => 'group milestone', 'group' => project.group)
+ it 'finds the existing group milestone' do
+ milestone = create(:milestone, name: 'group milestone', group: project.group)
expect(described_class.build(Milestone,
'title' => 'group milestone',
@@ -49,4 +49,30 @@ describe Gitlab::ImportExport::GroupProjectObjectBuilder do
expect(milestone.persisted?).to be true
end
end
+
+ context 'merge_request' do
+ it 'finds the existing merge_request' do
+ merge_request = create(:merge_request, title: 'MergeRequest', iid: 7, target_project: project, source_project: project)
+ expect(described_class.build(MergeRequest,
+ 'title' => 'MergeRequest',
+ 'source_project_id' => project.id,
+ 'target_project_id' => project.id,
+ 'source_branch' => 'SourceBranch',
+ 'iid' => 7,
+ 'target_branch' => 'TargetBranch',
+ 'author_id' => project.creator.id)).to eq(merge_request)
+ end
+
+ it 'creates a new merge_request' do
+ merge_request = described_class.build(MergeRequest,
+ 'title' => 'MergeRequest',
+ 'iid' => 8,
+ 'source_project_id' => project.id,
+ 'target_project_id' => project.id,
+ 'source_branch' => 'SourceBranch',
+ 'target_branch' => 'TargetBranch',
+ 'author_id' => project.creator.id)
+ expect(merge_request.persisted?).to be true
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/group_tree_saver_spec.rb b/spec/lib/gitlab/import_export/group_tree_saver_spec.rb
new file mode 100644
index 00000000000..b856441981a
--- /dev/null
+++ b/spec/lib/gitlab/import_export/group_tree_saver_spec.rb
@@ -0,0 +1,180 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::GroupTreeSaver do
+ describe 'saves the group tree into a json object' do
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_saver) { described_class.new(group: group, current_user: user, shared: shared) }
+ let(:export_path) { "#{Dir.tmpdir}/group_tree_saver_spec" }
+ let(:user) { create(:user) }
+ let!(:group) { setup_group }
+
+ before do
+ group.add_maintainer(user)
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ it 'saves group successfully' do
+ expect(group_tree_saver.save).to be true
+ end
+
+ context ':export_fast_serialize feature flag checks' do
+ before do
+ expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared, config: group_config).and_return(reader)
+ expect(reader).to receive(:group_tree).and_return(group_tree)
+ end
+
+ let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
+ let(:group_config) { Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.group_config_file).to_h }
+ let(:group_tree) do
+ {
+ include: [{ milestones: { include: [] } }],
+ preload: { milestones: nil }
+ }
+ end
+
+ context 'when :export_fast_serialize feature is enabled' do
+ let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
+
+ before do
+ stub_feature_flags(export_fast_serialize: true)
+
+ expect(Gitlab::ImportExport::FastHashSerializer).to receive(:new).with(group, group_tree).and_return(serializer)
+ end
+
+ it 'uses FastHashSerializer' do
+ expect(serializer).to receive(:execute)
+
+ group_tree_saver.save
+ end
+ end
+
+ context 'when :export_fast_serialize feature is disabled' do
+ before do
+ stub_feature_flags(export_fast_serialize: false)
+ end
+
+ it 'is serialized via built-in `as_json`' do
+ expect(group).to receive(:as_json).with(group_tree).and_call_original
+
+ group_tree_saver.save
+ end
+ end
+ end
+
+ # It is mostly duplicated in
+ # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
+ # except:
+ # context 'with description override' do
+ # context 'group members' do
+ # ^ These are specific for the GroupTreeSaver
+ context 'JSON' do
+ let(:saved_group_json) do
+ group_tree_saver.save
+ group_json(group_tree_saver.full_path)
+ end
+
+ it 'saves the correct json' do
+ expect(saved_group_json).to include({ 'description' => 'description', 'visibility_level' => 20 })
+ end
+
+ it 'has milestones' do
+ expect(saved_group_json['milestones']).not_to be_empty
+ end
+
+ it 'has labels' do
+ expect(saved_group_json['labels']).not_to be_empty
+ end
+
+ it 'has boards' do
+ expect(saved_group_json['boards']).not_to be_empty
+ end
+
+ it 'has group members' do
+ expect(saved_group_json['members']).not_to be_empty
+ end
+
+ it 'has priorities associated to labels' do
+ expect(saved_group_json['labels'].first['priorities']).not_to be_empty
+ end
+
+ it 'has badges' do
+ expect(saved_group_json['badges']).not_to be_empty
+ end
+
+ context 'group children' do
+ let(:children) { group.children }
+
+ it 'exports group children' do
+ expect(saved_group_json['children'].length).to eq(children.count)
+ end
+
+ it 'exports group children of children' do
+ expect(saved_group_json['children'].first['children'].length).to eq(children.first.children.count)
+ end
+ end
+
+ context 'group members' do
+ let(:user2) { create(:user, email: 'group@member.com') }
+ let(:member_emails) do
+ saved_group_json['members'].map do |pm|
+ pm['user']['email']
+ end
+ end
+
+ before do
+ group.add_developer(user2)
+ end
+
+ it 'exports group members as group owner' do
+ group.add_owner(user)
+
+ expect(member_emails).to include('group@member.com')
+ end
+
+ context 'as admin' do
+ let(:user) { create(:admin) }
+
+ it 'exports group members as admin' do
+ expect(member_emails).to include('group@member.com')
+ end
+
+ it 'exports group members' do
+ member_types = saved_group_json['members'].map { |pm| pm['source_type'] }
+
+ expect(member_types).to all(eq('Namespace'))
+ end
+ end
+ end
+
+ context 'group attributes' do
+ it 'does not contain the runners token' do
+ expect(saved_group_json).not_to include("runners_token" => 'token')
+ end
+ end
+ end
+ end
+
+ def setup_group
+ group = create(:group, description: 'description')
+ sub_group = create(:group, description: 'description', parent: group)
+ create(:group, description: 'description', parent: sub_group)
+ create(:milestone, group: group)
+ create(:group_badge, group: group)
+ group_label = create(:group_label, group: group)
+ create(:label_priority, label: group_label, priority: 1)
+ create(:board, group: group)
+ create(:group_badge, group: group)
+
+ group
+ end
+
+ def group_json(filename)
+ JSON.parse(IO.read(filename))
+ end
+end
diff --git a/spec/lib/gitlab/import_export/import_export_spec.rb b/spec/lib/gitlab/import_export/import_export_spec.rb
index 40a5f2294a2..a6b0dc758cd 100644
--- a/spec/lib/gitlab/import_export/import_export_spec.rb
+++ b/spec/lib/gitlab/import_export/import_export_spec.rb
@@ -6,17 +6,17 @@ describe Gitlab::ImportExport do
let(:project) { create(:project, :public, path: 'project-path', namespace: group) }
it 'contains the project path' do
- expect(described_class.export_filename(project: project)).to include(project.path)
+ expect(described_class.export_filename(exportable: project)).to include(project.path)
end
it 'contains the namespace path' do
- expect(described_class.export_filename(project: project)).to include(project.namespace.full_path.tr('/', '_'))
+ expect(described_class.export_filename(exportable: project)).to include(project.namespace.full_path.tr('/', '_'))
end
it 'does not go over a certain length' do
project.path = 'a' * 100
- expect(described_class.export_filename(project: project).length).to be < 70
+ expect(described_class.export_filename(exportable: project).length).to be < 70
end
end
end
diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
index ebd2c6089ce..459b1eed1a7 100644
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
@@ -2,6 +2,8 @@ require 'spec_helper'
include ImportExport::CommonUtil
describe Gitlab::ImportExport::ProjectTreeRestorer do
+ include ImportExport::CommonUtil
+
let(:shared) { project.import_export_shared }
describe 'restore project tree' do
@@ -16,7 +18,8 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
RSpec::Mocks.with_temporary_scope do
@project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project')
@shared = @project.import_export_shared
- allow(@shared).to receive(:export_path).and_return('spec/fixtures/lib/gitlab/import_export/')
+
+ setup_import_export_config('complex')
allow_any_instance_of(Repository).to receive(:fetch_source_branch!).and_return(true)
allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false)
@@ -207,10 +210,27 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(@project.project_badges.count).to eq(2)
end
+ it 'has snippets' do
+ expect(@project.snippets.count).to eq(1)
+ end
+
+ it 'has award emoji for a snippet' do
+ award_emoji = @project.snippets.first.award_emoji
+
+ expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'coffee')
+ end
+
it 'restores the correct service' do
expect(CustomIssueTrackerService.first).not_to be_nil
end
+ it 'restores zoom meetings' do
+ meetings = @project.issues.first.zoom_meetings
+
+ expect(meetings.count).to eq(1)
+ expect(meetings.first.url).to eq('https://zoom.us/j/123456789')
+ end
+
context 'Merge requests' do
it 'always has the new project as a target' do
expect(MergeRequest.find_by_title('MR1').target_project).to eq(@project)
@@ -250,9 +270,9 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
it 'has the correct number of pipelines and statuses' do
- expect(@project.ci_pipelines.size).to eq(5)
+ expect(@project.ci_pipelines.size).to eq(6)
- @project.ci_pipelines.zip([2, 2, 2, 2, 2])
+ @project.ci_pipelines.order(:id).zip([2, 2, 2, 2, 2, 0])
.each do |(pipeline, expected_status_size)|
expect(pipeline.statuses.size).to eq(expected_status_size)
end
@@ -261,7 +281,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
context 'when restoring hierarchy of pipeline, stages and jobs' do
it 'restores pipelines' do
- expect(Ci::Pipeline.all.count).to be 5
+ expect(Ci::Pipeline.all.count).to be 6
end
it 'restores pipeline stages' do
@@ -307,21 +327,33 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
end
- context 'Light JSON' do
+ context 'project.json file access check' do
let(:user) { create(:user) }
let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
let(:restored_project_json) { project_tree_restorer.restore }
- before do
- allow(shared).to receive(:export_path).and_return('spec/fixtures/lib/gitlab/import_export/')
+ it 'does not read a symlink' do
+ Dir.mktmpdir do |tmpdir|
+ setup_symlink(tmpdir, 'project.json')
+ allow(shared).to receive(:export_path).and_call_original
+
+ expect(project_tree_restorer.restore).to eq(false)
+ expect(shared.errors).to include('Incorrect JSON format')
+ end
end
+ end
+
+ context 'Light JSON' do
+ let(:user) { create(:user) }
+ let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
+ let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
+ let(:restored_project_json) { project_tree_restorer.restore }
context 'with a simple project' do
before do
- project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.light.json")
-
- restored_project_json
+ setup_import_export_config('light')
+ expect(restored_project_json).to eq(true)
end
it_behaves_like 'restores project correctly',
@@ -332,19 +364,6 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
first_issue_labels: 1,
services: 1
- context 'project.json file access check' do
- it 'does not read a symlink' do
- Dir.mktmpdir do |tmpdir|
- setup_symlink(tmpdir, 'project.json')
- allow(shared).to receive(:export_path).and_call_original
-
- restored_project_json
-
- expect(shared.errors).to be_empty
- end
- end
- end
-
context 'when there is an existing build with build token' do
before do
create(:ci_build, token: 'abcd')
@@ -360,6 +379,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
context 'when the project has overridden params in import data' do
+ before do
+ setup_import_export_config('light')
+ end
+
it 'handles string versions of visibility_level' do
# Project needs to be in a group for visibility level comparison
# to happen
@@ -368,24 +391,21 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
project.create_import_data(data: { override_params: { visibility_level: Gitlab::VisibilityLevel::INTERNAL.to_s } })
- restored_project_json
-
+ expect(restored_project_json).to eq(true)
expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
end
it 'overwrites the params stored in the JSON' do
project.create_import_data(data: { override_params: { description: "Overridden" } })
- restored_project_json
-
+ expect(restored_project_json).to eq(true)
expect(project.description).to eq("Overridden")
end
it 'does not allow setting params that are excluded from import_export settings' do
project.create_import_data(data: { override_params: { lfs_enabled: true } })
- restored_project_json
-
+ expect(restored_project_json).to eq(true)
expect(project.lfs_enabled).to be_falsey
end
@@ -401,7 +421,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
project.create_import_data(data: { override_params: disabled_access_levels })
- restored_project_json
+ expect(restored_project_json).to eq(true)
aggregate_failures do
access_level_keys.each do |key|
@@ -422,9 +442,8 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
before do
- project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.group.json")
-
- restored_project_json
+ setup_import_export_config('group')
+ expect(restored_project_json).to eq(true)
end
it_behaves_like 'restores project correctly',
@@ -456,11 +475,11 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
before do
- project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.light.json")
+ setup_import_export_config('light')
end
it 'does not import any templated services' do
- restored_project_json
+ expect(restored_project_json).to eq(true)
expect(project.services.where(template: true).count).to eq(0)
end
@@ -470,8 +489,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
- restored_project_json
-
+ expect(restored_project_json).to eq(true)
expect(project.labels.count).to eq(1)
end
@@ -480,8 +498,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
- restored_project_json
-
+ expect(restored_project_json).to eq(true)
expect(project.group.milestones.count).to eq(1)
expect(project.milestones.count).to eq(0)
end
@@ -497,13 +514,14 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
group: create(:group))
end
- it 'preserves the project milestone IID' do
- project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.milestone-iid.json")
+ before do
+ setup_import_export_config('milestone-iid')
+ end
+ it 'preserves the project milestone IID' do
expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
- restored_project_json
-
+ expect(restored_project_json).to eq(true)
expect(project.milestones.count).to eq(2)
expect(Milestone.find_by_title('Another milestone').iid).to eq(1)
expect(Milestone.find_by_title('Group-level milestone').iid).to eq(2)
@@ -511,19 +529,21 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
context 'with external authorization classification labels' do
+ before do
+ setup_import_export_config('light')
+ end
+
it 'converts empty external classification authorization labels to nil' do
project.create_import_data(data: { override_params: { external_authorization_classification_label: "" } })
- restored_project_json
-
+ expect(restored_project_json).to eq(true)
expect(project.external_authorization_classification_label).to be_nil
end
it 'preserves valid external classification authorization labels' do
project.create_import_data(data: { override_params: { external_authorization_classification_label: "foobar" } })
- restored_project_json
-
+ expect(restored_project_json).to eq(true)
expect(project.external_authorization_classification_label).to eq("foobar")
end
end
diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
index ff46e062a5d..97d8b155826 100644
--- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
@@ -203,7 +203,6 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
end
it 'has no when YML attributes but only the DB column' do
- allow_any_instance_of(Ci::Pipeline).to receive(:ci_yaml_file).and_return(File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')))
expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
saved_project_json
diff --git a/spec/lib/gitlab/import_export/relation_rename_service_spec.rb b/spec/lib/gitlab/import_export/relation_rename_service_spec.rb
index 472bf55d37e..d62f5725f9e 100644
--- a/spec/lib/gitlab/import_export/relation_rename_service_spec.rb
+++ b/spec/lib/gitlab/import_export/relation_rename_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe Gitlab::ImportExport::RelationRenameService do
+ include ImportExport::CommonUtil
+
let(:renames) do
{
'example_relation1' => 'new_example_relation1',
@@ -21,12 +23,12 @@ describe Gitlab::ImportExport::RelationRenameService do
context 'when importing' do
let(:project_tree_restorer) { Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project) }
- let(:import_path) { 'spec/fixtures/lib/gitlab/import_export' }
- let(:file_content) { IO.read("#{import_path}/project.json") }
- let!(:json_file) { ActiveSupport::JSON.decode(file_content) }
+ let(:file_content) { IO.read(File.join(shared.export_path, 'project.json')) }
+ let(:json_file) { ActiveSupport::JSON.decode(file_content) }
before do
- allow(shared).to receive(:export_path).and_return(import_path)
+ setup_import_export_config('complex')
+
allow(ActiveSupport::JSON).to receive(:decode).and_call_original
allow(ActiveSupport::JSON).to receive(:decode).with(file_content).and_return(json_file)
end
@@ -94,15 +96,20 @@ describe Gitlab::ImportExport::RelationRenameService do
let(:export_content_path) { project_tree_saver.full_path }
let(:export_content_hash) { ActiveSupport::JSON.decode(File.read(export_content_path)) }
let(:injected_hash) { renames.values.product([{}]).to_h }
+ let(:relation_tree_saver) { Gitlab::ImportExport::RelationTreeSaver.new }
let(:project_tree_saver) do
Gitlab::ImportExport::ProjectTreeSaver.new(
project: project, current_user: user, shared: shared)
end
+ before do
+ allow(project_tree_saver).to receive(:tree_saver).and_return(relation_tree_saver)
+ end
+
it 'adds old relationships to the exported file' do
# we inject relations with new names that should be rewritten
- expect(project_tree_saver).to receive(:serialize_project_tree).and_wrap_original do |method, *args|
+ expect(relation_tree_saver).to receive(:serialize).and_wrap_original do |method, *args|
method.call(*args).merge(injected_hash)
end
diff --git a/spec/lib/gitlab/import_export/relation_tree_saver_spec.rb b/spec/lib/gitlab/import_export/relation_tree_saver_spec.rb
new file mode 100644
index 00000000000..2fc26c0e3d4
--- /dev/null
+++ b/spec/lib/gitlab/import_export/relation_tree_saver_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::RelationTreeSaver do
+ let(:exportable) { create(:group) }
+ let(:relation_tree_saver) { described_class.new }
+ let(:tree) { {} }
+
+ describe '#serialize' do
+ context 'when :export_fast_serialize feature is enabled' do
+ let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
+
+ before do
+ stub_feature_flags(export_fast_serialize: true)
+ end
+
+ it 'uses FastHashSerializer' do
+ expect(Gitlab::ImportExport::FastHashSerializer)
+ .to receive(:new)
+ .with(exportable, tree)
+ .and_return(serializer)
+
+ expect(serializer).to receive(:execute)
+
+ relation_tree_saver.serialize(exportable, tree)
+ end
+ end
+
+ context 'when :export_fast_serialize feature is disabled' do
+ before do
+ stub_feature_flags(export_fast_serialize: false)
+ end
+
+ it 'is serialized via built-in `as_json`' do
+ expect(exportable).to receive(:as_json).with(tree)
+
+ relation_tree_saver.serialize(exportable, tree)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 8ae571a69ef..04fe985cdb5 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -185,6 +185,7 @@ MergeRequest:
- merge_when_pipeline_succeeds
- merge_user_id
- merge_commit_sha
+- squash_commit_sha
- in_progress_merge_commit_sha
- lock_version
- milestone_id
@@ -512,6 +513,7 @@ Project:
- request_access_enabled
- has_external_wiki
- only_allow_merge_if_all_discussions_are_resolved
+- remove_source_branch_after_merge
- auto_cancel_pending_pipelines
- printing_merge_request_link_enabled
- resolve_outdated_diff_discussions
@@ -537,7 +539,6 @@ Project:
- external_webhook_token
- pages_https_only
- merge_requests_disable_committers_approval
-- merge_requests_require_code_owner_approval
- require_password_to_approve
ProjectTracingSetting:
- external_url
@@ -752,4 +753,12 @@ DesignManagement::Version:
- created_at
- sha
- issue_id
-- user_id
+- author_id
+ZoomMeeting:
+- id
+- issue_id
+- project_id
+- issue_status
+- url
+- created_at
+- updated_at
diff --git a/spec/lib/gitlab/import_export/saver_spec.rb b/spec/lib/gitlab/import_export/saver_spec.rb
index d185ff2dfcc..aca63953677 100644
--- a/spec/lib/gitlab/import_export/saver_spec.rb
+++ b/spec/lib/gitlab/import_export/saver_spec.rb
@@ -5,7 +5,7 @@ describe Gitlab::ImportExport::Saver do
let!(:project) { create(:project, :public, name: 'project') }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
- subject { described_class.new(project: project, shared: shared) }
+ subject { described_class.new(exportable: project, shared: shared) }
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
diff --git a/spec/lib/gitlab/import_export/shared_spec.rb b/spec/lib/gitlab/import_export/shared_spec.rb
index 62669836973..fc011f7e1be 100644
--- a/spec/lib/gitlab/import_export/shared_spec.rb
+++ b/spec/lib/gitlab/import_export/shared_spec.rb
@@ -7,7 +7,7 @@ describe Gitlab::ImportExport::Shared do
context 'with a repository on disk' do
let(:project) { create(:project, :repository) }
- let(:base_path) { %(/tmp/project_exports/#{project.disk_path}/) }
+ let(:base_path) { %(/tmp/gitlab_exports/#{project.disk_path}/) }
describe '#archive_path' do
it 'uses a random hash to avoid conflicts' do
diff --git a/spec/lib/gitlab/import_sources_spec.rb b/spec/lib/gitlab/import_sources_spec.rb
index 8060b5d4448..265241dc2af 100644
--- a/spec/lib/gitlab/import_sources_spec.rb
+++ b/spec/lib/gitlab/import_sources_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::ImportSources do
diff --git a/spec/lib/gitlab/incoming_email_spec.rb b/spec/lib/gitlab/incoming_email_spec.rb
index 2db62ab983a..598336d0b31 100644
--- a/spec/lib/gitlab/incoming_email_spec.rb
+++ b/spec/lib/gitlab/incoming_email_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require "spec_helper"
describe Gitlab::IncomingEmail do
diff --git a/spec/lib/gitlab/insecure_key_fingerprint_spec.rb b/spec/lib/gitlab/insecure_key_fingerprint_spec.rb
index 6532579b1c9..7f20ae98b06 100644
--- a/spec/lib/gitlab/insecure_key_fingerprint_spec.rb
+++ b/spec/lib/gitlab/insecure_key_fingerprint_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::InsecureKeyFingerprint do
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
new file mode 100644
index 00000000000..c2674638743
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+describe Gitlab::InstrumentationHelper do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '.queue_duration_for_job' do
+ where(:enqueued_at, :created_at, :time_now, :expected_duration) do
+ "2019-06-01T00:00:00.000+0000" | nil | "2019-06-01T02:00:00.000+0000" | 2.hours.to_f
+ "2019-06-01T02:00:00.000+0000" | nil | "2019-06-01T02:00:00.001+0000" | 0.001
+ "2019-06-01T02:00:00.000+0000" | "2019-05-01T02:00:00.000+0000" | "2019-06-01T02:00:01.000+0000" | 1
+ nil | "2019-06-01T02:00:00.000+0000" | "2019-06-01T02:00:00.001+0000" | 0.001
+ nil | nil | "2019-06-01T02:00:00.001+0000" | nil
+ "2019-06-01T02:00:00.000+0200" | nil | "2019-06-01T02:00:00.000-0200" | 4.hours.to_f
+ 1571825569.998168 | nil | "2019-10-23T12:13:16.000+0200" | 26.001832
+ 1571825569 | nil | "2019-10-23T12:13:16.000+0200" | 27
+ "invalid_date" | nil | "2019-10-23T12:13:16.000+0200" | nil
+ "" | nil | "2019-10-23T12:13:16.000+0200" | nil
+ 0 | nil | "2019-10-23T12:13:16.000+0200" | nil
+ -1 | nil | "2019-10-23T12:13:16.000+0200" | nil
+ "2019-06-01T02:00:00.000+0000" | nil | "2019-06-01T00:00:00.000+0000" | 0
+ Time.at(1571999233) | nil | "2019-10-25T12:29:16.000+0200" | 123
+ end
+
+ with_them do
+ let(:job) { { 'enqueued_at' => enqueued_at, 'created_at' => created_at } }
+
+ it "returns the correct duration" do
+ Timecop.freeze(Time.iso8601(time_now)) do
+ expect(described_class.queue_duration_for_job(job)).to eq(expected_duration)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/issuable_metadata_spec.rb b/spec/lib/gitlab/issuable_metadata_spec.rb
index 032467b8b4e..7632bc3060a 100644
--- a/spec/lib/gitlab/issuable_metadata_spec.rb
+++ b/spec/lib/gitlab/issuable_metadata_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::IssuableMetadata do
diff --git a/spec/lib/gitlab/issuable_sorter_spec.rb b/spec/lib/gitlab/issuable_sorter_spec.rb
index 5bd76bc6081..486e9539b92 100644
--- a/spec/lib/gitlab/issuable_sorter_spec.rb
+++ b/spec/lib/gitlab/issuable_sorter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::IssuableSorter do
diff --git a/spec/lib/gitlab/issuables_count_for_state_spec.rb b/spec/lib/gitlab/issuables_count_for_state_spec.rb
index c262fdfcb61..9380aa53470 100644
--- a/spec/lib/gitlab/issuables_count_for_state_spec.rb
+++ b/spec/lib/gitlab/issuables_count_for_state_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::IssuablesCountForState do
diff --git a/spec/lib/gitlab/job_waiter_spec.rb b/spec/lib/gitlab/job_waiter_spec.rb
index b0b4fdc09bc..efa7fd4b975 100644
--- a/spec/lib/gitlab/job_waiter_spec.rb
+++ b/spec/lib/gitlab/job_waiter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::JobWaiter do
diff --git a/spec/lib/gitlab/json_logger_spec.rb b/spec/lib/gitlab/json_logger_spec.rb
index 3d4f9b5db86..5d544198c40 100644
--- a/spec/lib/gitlab/json_logger_spec.rb
+++ b/spec/lib/gitlab/json_logger_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::JsonLogger do
diff --git a/spec/lib/gitlab/kubernetes/config_maps/aws_node_auth_spec.rb b/spec/lib/gitlab/kubernetes/config_maps/aws_node_auth_spec.rb
new file mode 100644
index 00000000000..f701643860a
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/config_maps/aws_node_auth_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Kubernetes::ConfigMaps::AwsNodeAuth do
+ describe '#generate' do
+ let(:role) { 'arn:aws:iam::123456789012:role/node-instance-role' }
+
+ let(:name) { 'aws-auth' }
+ let(:namespace) { 'kube-system' }
+ let(:role_config) do
+ [{
+ 'rolearn' => role,
+ 'username' => 'system:node:{{EC2PrivateDNSName}}',
+ 'groups' => [
+ 'system:bootstrappers',
+ 'system:nodes'
+ ]
+ }]
+ end
+
+ subject { described_class.new(role).generate }
+
+ it 'builds a Kubeclient Resource' do
+ expect(subject).to be_a(Kubeclient::Resource)
+
+ expect(subject.metadata.name).to eq(name)
+ expect(subject.metadata.namespace).to eq(namespace)
+
+ expect(YAML.safe_load(subject.data.mapRoles)).to eq(role_config)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
index 9eb3322f1a6..e5a361bdab3 100644
--- a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
@@ -86,33 +86,6 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
end
end
- context 'when there is no repository' do
- let(:repository) { nil }
-
- it_behaves_like 'helm commands' do
- let(:commands) do
- <<~EOS
- helm init --upgrade
- for i in $(seq 1 30); do helm version #{tls_flags} && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)
- #{helm_install_command}
- EOS
- end
-
- let(:helm_install_command) do
- <<~EOS.squish
- helm upgrade app-name chart-name
- --install
- --reset-values
- #{tls_flags}
- --version 1.2.3
- --set rbac.create\\=false,rbac.enabled\\=false
- --namespace gitlab-managed-apps
- -f /data/helm/app-name/config/values.yaml
- EOS
- end
- end
- end
-
context 'when there is a pre-install script' do
let(:preinstall) { ['/bin/date', '/bin/true'] }
diff --git a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
index 64cadcc011c..e1b4bd0b664 100644
--- a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
@@ -30,7 +30,7 @@ describe Gitlab::Kubernetes::Helm::Pod do
it 'generates the appropriate specifications for the container' do
container = subject.generate.spec.containers.first
expect(container.name).to eq('helm')
- expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.14.3-kube-1.11.10')
+ expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.1-kube-1.13.12')
expect(container.env.count).to eq(3)
expect(container.env.map(&:name)).to match_array([:HELM_VERSION, :TILLER_NAMESPACE, :COMMAND_SCRIPT])
expect(container.command).to match_array(["/bin/sh"])
diff --git a/spec/lib/gitlab/kubernetes_spec.rb b/spec/lib/gitlab/kubernetes_spec.rb
index a7ea942960b..31bfd20449d 100644
--- a/spec/lib/gitlab/kubernetes_spec.rb
+++ b/spec/lib/gitlab/kubernetes_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Kubernetes do
diff --git a/spec/lib/gitlab/language_detection_spec.rb b/spec/lib/gitlab/language_detection_spec.rb
index 9636fbd401b..f558ce0d527 100644
--- a/spec/lib/gitlab/language_detection_spec.rb
+++ b/spec/lib/gitlab/language_detection_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::LanguageDetection do
diff --git a/spec/lib/gitlab/lazy_spec.rb b/spec/lib/gitlab/lazy_spec.rb
index 37a3ac74316..19758a18589 100644
--- a/spec/lib/gitlab/lazy_spec.rb
+++ b/spec/lib/gitlab/lazy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::Lazy do
diff --git a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
index af5df1fab43..697bedf7362 100644
--- a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
@@ -136,7 +136,7 @@ describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_cachi
describe '.find_all_paths' do
let(:all_dashboard_paths) { described_class.find_all_paths(project) }
- let(:system_dashboard) { { path: system_dashboard_path, display_name: 'Default', default: true } }
+ let(:system_dashboard) { { path: system_dashboard_path, display_name: 'Default', default: true, system_dashboard: true } }
it 'includes only the system dashboard by default' do
expect(all_dashboard_paths).to eq([system_dashboard])
@@ -147,7 +147,7 @@ describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_cachi
let(:project) { project_with_dashboard(dashboard_path) }
it 'includes system and project dashboards' do
- project_dashboard = { path: dashboard_path, display_name: 'test.yml', default: false }
+ project_dashboard = { path: dashboard_path, display_name: 'test.yml', default: false, system_dashboard: false }
expect(all_dashboard_paths).to contain_exactly(system_dashboard, project_dashboard)
end
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
index e2ce1869810..4fa136bc405 100644
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
@@ -25,6 +25,14 @@ describe Gitlab::Metrics::Dashboard::Processor do
end
end
+ context 'when the dashboard is not present' do
+ let(:dashboard_yml) { nil }
+
+ it 'returns nil' do
+ expect(dashboard).to be_nil
+ end
+ end
+
context 'when dashboard config corresponds to common metrics' do
let!(:common_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
diff --git a/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb b/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
index 095d0a2df78..0d4562f78f1 100644
--- a/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
@@ -75,6 +75,17 @@ describe Gitlab::Metrics::Dashboard::ServiceSelector do
it { is_expected.to be Metrics::Dashboard::CustomMetricEmbedService }
end
+
+ context 'with a grafana link' do
+ let(:arguments) do
+ {
+ embedded: true,
+ grafana_url: 'https://grafana.example.com'
+ }
+ end
+
+ it { is_expected.to be Metrics::Dashboard::GrafanaMetricEmbedService }
+ end
end
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb
new file mode 100644
index 00000000000..5c2ec6dae6b
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Metrics::Dashboard::Stages::GrafanaFormatter do
+ include GrafanaApiHelpers
+
+ let_it_be(:namespace) { create(:namespace, name: 'foo') }
+ let_it_be(:project) { create(:project, namespace: namespace, name: 'bar') }
+
+ describe '#transform!' do
+ let(:grafana_dashboard) { JSON.parse(fixture_file('grafana/simplified_dashboard_response.json'), symbolize_names: true) }
+ let(:datasource) { JSON.parse(fixture_file('grafana/datasource_response.json'), symbolize_names: true) }
+
+ let(:dashboard) { described_class.new(project, {}, params).transform! }
+
+ let(:params) do
+ {
+ grafana_dashboard: grafana_dashboard,
+ datasource: datasource,
+ grafana_url: valid_grafana_dashboard_link('https://grafana.example.com')
+ }
+ end
+
+ context 'when the query and resources are configured correctly' do
+ let(:expected_dashboard) { JSON.parse(fixture_file('grafana/expected_grafana_embed.json'), symbolize_names: true) }
+
+ it 'generates a gitlab-yml formatted dashboard' do
+ expect(dashboard).to eq(expected_dashboard)
+ end
+ end
+
+ context 'when the inputs are invalid' do
+ shared_examples_for 'processing error' do
+ it 'raises a processing error' do
+ expect { dashboard }
+ .to raise_error(Gitlab::Metrics::Dashboard::Stages::InputFormatValidator::DashboardProcessingError)
+ end
+ end
+
+ context 'when the datasource is not proxyable' do
+ before do
+ params[:datasource][:access] = 'not-proxy'
+ end
+
+ it_behaves_like 'processing error'
+ end
+
+ context 'when query param "panelId" is not specified' do
+ before do
+ params[:grafana_url].gsub!('panelId=8', '')
+ end
+
+ it_behaves_like 'processing error'
+ end
+
+ context 'when query param "from" is not specified' do
+ before do
+ params[:grafana_url].gsub!('from=1570397739557', '')
+ end
+
+ it_behaves_like 'processing error'
+ end
+
+ context 'when query param "to" is not specified' do
+ before do
+ params[:grafana_url].gsub!('to=1570484139557', '')
+ end
+
+ it_behaves_like 'processing error'
+ end
+
+ context 'when the panel is not a graph' do
+ before do
+ params[:grafana_dashboard][:dashboard][:panels][0][:type] = 'singlestat'
+ end
+
+ it_behaves_like 'processing error'
+ end
+
+ context 'when the panel is not a line graph' do
+ before do
+ params[:grafana_dashboard][:dashboard][:panels][0][:lines] = false
+ end
+
+ it_behaves_like 'processing error'
+ end
+
+ context 'when the query dashboard includes undefined variables' do
+ before do
+ params[:grafana_url].gsub!('&var-instance=localhost:9121', '')
+ end
+
+ it_behaves_like 'processing error'
+ end
+
+ context 'when the expression contains unsupported global variables' do
+ before do
+ params[:grafana_dashboard][:dashboard][:panels][0][:targets][0][:expr] = 'sum(important_metric[$__interval_ms])'
+ end
+
+ it_behaves_like 'processing error'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/url_spec.rb b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
index e0dc6d98efc..daaf66cba46 100644
--- a/spec/lib/gitlab/metrics/dashboard/url_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
@@ -3,13 +3,41 @@
require 'spec_helper'
describe Gitlab::Metrics::Dashboard::Url do
- describe '#regex' do
- it 'returns a regular expression' do
- expect(described_class.regex).to be_a Regexp
- end
+ shared_examples_for 'a regex which matches the expected url' do
+ it { is_expected.to be_a Regexp }
it 'matches a metrics dashboard link with named params' do
- url = Gitlab::Routing.url_helpers.metrics_namespace_project_environment_url(
+ expect(subject).to match url
+
+ subject.match(url) do |m|
+ expect(m.named_captures).to eq expected_params
+ end
+ end
+ end
+
+ shared_examples_for 'does not match non-matching urls' do
+ it 'does not match other gitlab urls that contain the term metrics' do
+ url = Gitlab::Routing.url_helpers.active_common_namespace_project_prometheus_metrics_url('foo', 'bar', :json)
+
+ expect(subject).not_to match url
+ end
+
+ it 'does not match other gitlab urls' do
+ url = Gitlab.config.gitlab.url
+
+ expect(subject).not_to match url
+ end
+
+ it 'does not match non-gitlab urls' do
+ url = 'https://www.super_awesome_site.com/'
+
+ expect(subject).not_to match url
+ end
+ end
+
+ describe '#regex' do
+ let(:url) do
+ Gitlab::Routing.url_helpers.metrics_namespace_project_environment_url(
'foo',
'bar',
1,
@@ -18,8 +46,10 @@ describe Gitlab::Metrics::Dashboard::Url do
group: 'awesome group',
anchor: 'title'
)
+ end
- expected_params = {
+ let(:expected_params) do
+ {
'url' => url,
'namespace' => 'foo',
'project' => 'bar',
@@ -27,31 +57,40 @@ describe Gitlab::Metrics::Dashboard::Url do
'query' => '?dashboard=config%2Fprometheus%2Fcommon_metrics.yml&group=awesome+group&start=2019-08-02T05%3A43%3A09.000Z',
'anchor' => '#title'
}
-
- expect(described_class.regex).to match url
-
- described_class.regex.match(url) do |m|
- expect(m.named_captures).to eq expected_params
- end
end
- it 'does not match other gitlab urls that contain the term metrics' do
- url = Gitlab::Routing.url_helpers.active_common_namespace_project_prometheus_metrics_url('foo', 'bar', :json)
+ subject { described_class.regex }
- expect(described_class.regex).not_to match url
- end
+ it_behaves_like 'a regex which matches the expected url'
+ it_behaves_like 'does not match non-matching urls'
+ end
- it 'does not match other gitlab urls' do
- url = Gitlab.config.gitlab.url
+ describe '#grafana_regex' do
+ let(:url) do
+ Gitlab::Routing.url_helpers.namespace_project_grafana_api_metrics_dashboard_url(
+ 'foo',
+ 'bar',
+ start: '2019-08-02T05:43:09.000Z',
+ dashboard: 'config/prometheus/common_metrics.yml',
+ group: 'awesome group',
+ anchor: 'title'
+ )
+ end
- expect(described_class.regex).not_to match url
+ let(:expected_params) do
+ {
+ 'url' => url,
+ 'namespace' => 'foo',
+ 'project' => 'bar',
+ 'query' => '?dashboard=config%2Fprometheus%2Fcommon_metrics.yml&group=awesome+group&start=2019-08-02T05%3A43%3A09.000Z',
+ 'anchor' => '#title'
+ }
end
- it 'does not match non-gitlab urls' do
- url = 'https://www.super_awesome_site.com/'
+ subject { described_class.grafana_regex }
- expect(described_class.regex).not_to match url
- end
+ it_behaves_like 'a regex which matches the expected url'
+ it_behaves_like 'does not match non-matching urls'
end
describe '#build_dashboard_url' do
diff --git a/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb
index 99349934e63..f22993cf057 100644
--- a/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb
+++ b/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb
@@ -4,61 +4,41 @@ require 'spec_helper'
describe Gitlab::Metrics::Exporter::WebExporter do
let(:exporter) { described_class.new }
-
- context 'when blackout seconds is used' do
- let(:blackout_seconds) { 0 }
- let(:readiness_probe) { exporter.send(:readiness_probe).execute }
-
- before do
- stub_config(
- monitoring: {
- web_exporter: {
- enabled: true,
- port: 0,
- address: '127.0.0.1',
- blackout_seconds: blackout_seconds
- }
+ let(:readiness_probe) { exporter.send(:readiness_probe).execute }
+
+ before do
+ stub_config(
+ monitoring: {
+ web_exporter: {
+ enabled: true,
+ port: 0,
+ address: '127.0.0.1'
}
- )
-
- exporter.start
- end
-
- after do
- exporter.stop
- end
+ }
+ )
- context 'when running server' do
- it 'readiness probe returns succesful status' do
- expect(readiness_probe.http_status).to eq(200)
- expect(readiness_probe.json).to include(status: 'ok')
- expect(readiness_probe.json).to include('web_exporter' => [{ 'status': 'ok' }])
- end
- end
-
- context 'when blackout seconds is 10s' do
- let(:blackout_seconds) { 10 }
+ exporter.start
+ end
- it 'readiness probe returns a failure status' do
- # during sleep we check the status of readiness probe
- expect(exporter).to receive(:sleep).with(10) do
- expect(readiness_probe.http_status).to eq(503)
- expect(readiness_probe.json).to include(status: 'failed')
- expect(readiness_probe.json).to include('web_exporter' => [{ 'status': 'failed' }])
- end
+ after do
+ exporter.stop
+ end
- exporter.stop
- end
+ context 'when running server' do
+ it 'readiness probe returns successful status' do
+ expect(readiness_probe.http_status).to eq(200)
+ expect(readiness_probe.json).to include(status: 'ok')
+ expect(readiness_probe.json).to include('web_exporter' => [{ 'status': 'ok' }])
end
+ end
- context 'when blackout is disabled' do
- let(:blackout_seconds) { 0 }
-
- it 'readiness probe returns a failure status' do
- expect(exporter).not_to receive(:sleep)
+ describe '#mark_as_not_running!' do
+ it 'readiness probe returns a failure status' do
+ exporter.mark_as_not_running!
- exporter.stop
- end
+ expect(readiness_probe.http_status).to eq(503)
+ expect(readiness_probe.json).to include(status: 'failed')
+ expect(readiness_probe.json).to include('web_exporter' => [{ 'status': 'failed' }])
end
end
end
diff --git a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
index f48cd096a98..335670278c4 100644
--- a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
@@ -31,7 +31,7 @@ describe Gitlab::Metrics::RequestsRackMiddleware do
end
it 'measures execution time' do
- expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ status: 200, method: 'get' }, a_positive_execution_time)
+ expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ status: '200', method: 'get' }, a_positive_execution_time)
Timecop.scale(3600) { subject.call(env) }
end
@@ -69,7 +69,7 @@ describe Gitlab::Metrics::RequestsRackMiddleware do
expected_labels = []
described_class::HTTP_METHODS.each do |method, statuses|
statuses.each do |status|
- expected_labels << { method: method, status: status.to_i }
+ expected_labels << { method: method, status: status.to_s }
end
end
diff --git a/spec/lib/gitlab/pagination/offset_pagination_spec.rb b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
new file mode 100644
index 00000000000..9c7dd385726
--- /dev/null
+++ b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
@@ -0,0 +1,215 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Pagination::OffsetPagination do
+ let(:resource) { Project.all }
+ let(:custom_port) { 8080 }
+ let(:incoming_api_projects_url) { "#{Gitlab.config.gitlab.url}:#{custom_port}/api/v4/projects" }
+
+ before do
+ stub_config_setting(port: custom_port)
+ end
+
+ let(:request_context) { double("request_context") }
+
+ subject do
+ described_class.new(request_context)
+ end
+
+ describe '#paginate' do
+ let(:value) { spy('return value') }
+ let(:base_query) { { foo: 'bar', bar: 'baz' } }
+ let(:query) { base_query }
+
+ before do
+ allow(request_context).to receive(:header).and_return(value)
+ allow(request_context).to receive(:params).and_return(query)
+ allow(request_context).to receive(:request).and_return(double(url: "#{incoming_api_projects_url}?#{query.to_query}"))
+ end
+
+ context 'when resource can be paginated' do
+ before do
+ create_list(:project, 3)
+ end
+
+ describe 'first page' do
+ shared_examples 'response with pagination headers' do
+ it 'adds appropriate headers' do
+ expect_header('X-Total', '3')
+ expect_header('X-Total-Pages', '2')
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Page', '1')
+ expect_header('X-Next-Page', '2')
+ expect_header('X-Prev-Page', '')
+
+ expect_header('Link', anything) do |_key, val|
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first"))
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="last"))
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="next"))
+ expect(val).not_to include('rel="prev"')
+ end
+
+ subject.paginate(resource)
+ end
+ end
+
+ shared_examples 'paginated response' do
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 2
+ end
+
+ it 'executes only one SELECT COUNT query' do
+ expect { subject.paginate(resource) }.to make_queries_matching(/SELECT COUNT/, 1)
+ end
+ end
+
+ let(:query) { base_query.merge(page: 1, per_page: 2) }
+
+ context 'when the api_kaminari_count_with_limit feature flag is unset' do
+ it_behaves_like 'paginated response'
+ it_behaves_like 'response with pagination headers'
+ end
+
+ context 'when the api_kaminari_count_with_limit feature flag is disabled' do
+ before do
+ stub_feature_flags(api_kaminari_count_with_limit: false)
+ end
+
+ it_behaves_like 'paginated response'
+ it_behaves_like 'response with pagination headers'
+ end
+
+ context 'when the api_kaminari_count_with_limit feature flag is enabled' do
+ before do
+ stub_feature_flags(api_kaminari_count_with_limit: true)
+ end
+
+ context 'when resources count is less than MAX_COUNT_LIMIT' do
+ before do
+ stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_LIMIT", 4)
+ end
+
+ it_behaves_like 'paginated response'
+ it_behaves_like 'response with pagination headers'
+ end
+
+ context 'when resources count is more than MAX_COUNT_LIMIT' do
+ before do
+ stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_LIMIT", 2)
+ end
+
+ it_behaves_like 'paginated response'
+
+ it 'does not return the X-Total and X-Total-Pages headers' do
+ expect_no_header('X-Total')
+ expect_no_header('X-Total-Pages')
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Page', '1')
+ expect_header('X-Next-Page', '2')
+ expect_header('X-Prev-Page', '')
+
+ expect_header('Link', anything) do |_key, val|
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first"))
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="next"))
+ expect(val).not_to include('rel="last"')
+ expect(val).not_to include('rel="prev"')
+ end
+
+ subject.paginate(resource)
+ end
+ end
+ end
+ end
+
+ describe 'second page' do
+ let(:query) { base_query.merge(page: 2, per_page: 2) }
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 1
+ end
+
+ it 'adds appropriate headers' do
+ expect_header('X-Total', '3')
+ expect_header('X-Total-Pages', '2')
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Page', '2')
+ expect_header('X-Next-Page', '')
+ expect_header('X-Prev-Page', '1')
+
+ expect_header('Link', anything) do |_key, val|
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first"))
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="last"))
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="prev"))
+ expect(val).not_to include('rel="next"')
+ end
+
+ subject.paginate(resource)
+ end
+ end
+
+ context 'if order' do
+ it 'is not present it adds default order(:id) if no order is present' do
+ resource.order_values = []
+
+ paginated_relation = subject.paginate(resource)
+
+ expect(resource.order_values).to be_empty
+ expect(paginated_relation.order_values).to be_present
+ expect(paginated_relation.order_values.first).to be_ascending
+ expect(paginated_relation.order_values.first.expr.name).to eq 'id'
+ end
+
+ it 'is present it does not add anything' do
+ paginated_relation = subject.paginate(resource.order(created_at: :desc))
+
+ expect(paginated_relation.order_values).to be_present
+ expect(paginated_relation.order_values.first).to be_descending
+ expect(paginated_relation.order_values.first.expr.name).to eq 'created_at'
+ end
+ end
+ end
+
+ context 'when resource empty' do
+ describe 'first page' do
+ let(:query) { base_query.merge(page: 1, per_page: 2) }
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 0
+ end
+
+ it 'adds appropriate headers' do
+ expect_header('X-Total', '0')
+ expect_header('X-Total-Pages', '1')
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Page', '1')
+ expect_header('X-Next-Page', '')
+ expect_header('X-Prev-Page', '')
+
+ expect_header('Link', anything) do |_key, val|
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first"))
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="last"))
+ expect(val).not_to include('rel="prev"')
+ expect(val).not_to include('rel="next"')
+ expect(val).not_to include('page=0')
+ end
+
+ subject.paginate(resource)
+ end
+ end
+ end
+ end
+
+ def expect_header(*args, &block)
+ expect(subject).to receive(:header).with(*args, &block)
+ end
+
+ def expect_no_header(*args, &block)
+ expect(subject).not_to receive(:header).with(*args)
+ end
+
+ def expect_message(method)
+ expect(subject).to receive(method)
+ .at_least(:once).and_return(value)
+ end
+end
diff --git a/spec/lib/gitlab/phabricator_import/project_creator_spec.rb b/spec/lib/gitlab/phabricator_import/project_creator_spec.rb
index e9455b866ac..fd17284eea2 100644
--- a/spec/lib/gitlab/phabricator_import/project_creator_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/project_creator_spec.rb
@@ -11,7 +11,7 @@ describe Gitlab::PhabricatorImport::ProjectCreator do
subject(:creator) { described_class.new(user, params) }
describe '#execute' do
- it 'creates a project correctly and schedule an import' do
+ it 'creates a project correctly and schedule an import', :sidekiq_might_not_need_inline do
expect_next_instance_of(Gitlab::PhabricatorImport::Importer) do |importer|
expect(importer).to receive(:execute)
end
diff --git a/spec/lib/gitlab/project_authorizations_spec.rb b/spec/lib/gitlab/project_authorizations_spec.rb
index 82ccb42f8a6..6e5c36172e2 100644
--- a/spec/lib/gitlab/project_authorizations_spec.rb
+++ b/spec/lib/gitlab/project_authorizations_spec.rb
@@ -3,48 +3,55 @@
require 'spec_helper'
describe Gitlab::ProjectAuthorizations do
- let(:group) { create(:group) }
- let!(:owned_project) { create(:project) }
- let!(:other_project) { create(:project) }
- let!(:group_project) { create(:project, namespace: group) }
-
- let(:user) { owned_project.namespace.owner }
-
def map_access_levels(rows)
rows.each_with_object({}) do |row, hash|
hash[row.project_id] = row.access_level
end
end
- before do
- other_project.add_reporter(user)
- group.add_developer(user)
- end
-
- let(:authorizations) do
+ subject(:authorizations) do
described_class.new(user).calculate
end
- it 'returns the correct number of authorizations' do
- expect(authorizations.length).to eq(3)
- end
+ context 'user added to group and project' do
+ let(:group) { create(:group) }
+ let!(:other_project) { create(:project) }
+ let!(:group_project) { create(:project, namespace: group) }
+ let!(:owned_project) { create(:project) }
+ let(:user) { owned_project.namespace.owner }
- it 'includes the correct projects' do
- expect(authorizations.pluck(:project_id))
- .to include(owned_project.id, other_project.id, group_project.id)
- end
+ before do
+ other_project.add_reporter(user)
+ group.add_developer(user)
+ end
+
+ it 'returns the correct number of authorizations' do
+ expect(authorizations.length).to eq(3)
+ end
- it 'includes the correct access levels' do
- mapping = map_access_levels(authorizations)
+ it 'includes the correct projects' do
+ expect(authorizations.pluck(:project_id))
+ .to include(owned_project.id, other_project.id, group_project.id)
+ end
+
+ it 'includes the correct access levels' do
+ mapping = map_access_levels(authorizations)
- expect(mapping[owned_project.id]).to eq(Gitlab::Access::MAINTAINER)
- expect(mapping[other_project.id]).to eq(Gitlab::Access::REPORTER)
- expect(mapping[group_project.id]).to eq(Gitlab::Access::DEVELOPER)
+ expect(mapping[owned_project.id]).to eq(Gitlab::Access::MAINTAINER)
+ expect(mapping[other_project.id]).to eq(Gitlab::Access::REPORTER)
+ expect(mapping[group_project.id]).to eq(Gitlab::Access::DEVELOPER)
+ end
end
context 'with nested groups' do
+ let(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
let!(:nested_project) { create(:project, namespace: nested_group) }
+ let(:user) { create(:user) }
+
+ before do
+ group.add_developer(user)
+ end
it 'includes nested groups' do
expect(authorizations.pluck(:project_id)).to include(nested_project.id)
@@ -64,4 +71,114 @@ describe Gitlab::ProjectAuthorizations do
expect(mapping[nested_project.id]).to eq(Gitlab::Access::MAINTAINER)
end
end
+
+ context 'with shared groups' do
+ let(:parent_group_user) { create(:user) }
+ let(:group_user) { create(:user) }
+ let(:child_group_user) { create(:user) }
+
+ let_it_be(:group_parent) { create(:group, :private) }
+ let_it_be(:group) { create(:group, :private, parent: group_parent) }
+ let_it_be(:group_child) { create(:group, :private, parent: group) }
+
+ let_it_be(:shared_group_parent) { create(:group, :private) }
+ let_it_be(:shared_group) { create(:group, :private, parent: shared_group_parent) }
+ let_it_be(:shared_group_child) { create(:group, :private, parent: shared_group) }
+
+ let_it_be(:project_parent) { create(:project, group: shared_group_parent) }
+ let_it_be(:project) { create(:project, group: shared_group) }
+ let_it_be(:project_child) { create(:project, group: shared_group_child) }
+
+ before do
+ group_parent.add_owner(parent_group_user)
+ group.add_owner(group_user)
+ group_child.add_owner(child_group_user)
+
+ create(:group_group_link, shared_group: shared_group, shared_with_group: group)
+ end
+
+ context 'when feature flag share_group_with_group is enabled' do
+ before do
+ stub_feature_flags(share_group_with_group: true)
+ end
+
+ context 'group user' do
+ let(:user) { group_user }
+
+ it 'creates proper authorizations' do
+ mapping = map_access_levels(authorizations)
+
+ expect(mapping[project_parent.id]).to be_nil
+ expect(mapping[project.id]).to eq(Gitlab::Access::DEVELOPER)
+ expect(mapping[project_child.id]).to eq(Gitlab::Access::DEVELOPER)
+ end
+ end
+
+ context 'parent group user' do
+ let(:user) { parent_group_user }
+
+ it 'creates proper authorizations' do
+ mapping = map_access_levels(authorizations)
+
+ expect(mapping[project_parent.id]).to be_nil
+ expect(mapping[project.id]).to be_nil
+ expect(mapping[project_child.id]).to be_nil
+ end
+ end
+
+ context 'child group user' do
+ let(:user) { child_group_user }
+
+ it 'creates proper authorizations' do
+ mapping = map_access_levels(authorizations)
+
+ expect(mapping[project_parent.id]).to be_nil
+ expect(mapping[project.id]).to be_nil
+ expect(mapping[project_child.id]).to be_nil
+ end
+ end
+ end
+
+ context 'when feature flag share_group_with_group is disabled' do
+ before do
+ stub_feature_flags(share_group_with_group: false)
+ end
+
+ context 'group user' do
+ let(:user) { group_user }
+
+ it 'creates proper authorizations' do
+ mapping = map_access_levels(authorizations)
+
+ expect(mapping[project_parent.id]).to be_nil
+ expect(mapping[project.id]).to be_nil
+ expect(mapping[project_child.id]).to be_nil
+ end
+ end
+
+ context 'parent group user' do
+ let(:user) { parent_group_user }
+
+ it 'creates proper authorizations' do
+ mapping = map_access_levels(authorizations)
+
+ expect(mapping[project_parent.id]).to be_nil
+ expect(mapping[project.id]).to be_nil
+ expect(mapping[project_child.id]).to be_nil
+ end
+ end
+
+ context 'child group user' do
+ let(:user) { child_group_user }
+
+ it 'creates proper authorizations' do
+ mapping = map_access_levels(authorizations)
+
+ expect(mapping[project_parent.id]).to be_nil
+ expect(mapping[project.id]).to be_nil
+ expect(mapping[project_child.id]).to be_nil
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index d6e50c672e6..99078f19361 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -79,20 +79,20 @@ describe Gitlab::ProjectSearchResults do
end
it 'finds by name' do
- expect(results.map(&:filename)).to include(expected_file_by_name)
+ expect(results.map(&:path)).to include(expected_file_by_path)
end
- it "loads all blobs for filename matches in single batch" do
+ it "loads all blobs for path matches in single batch" do
expect(Gitlab::Git::Blob).to receive(:batch).once.and_call_original
expected = project.repository.search_files_by_name(query, 'master')
- expect(results.map(&:filename)).to include(*expected)
+ expect(results.map(&:path)).to include(*expected)
end
it 'finds by content' do
- blob = results.select { |result| result.filename == expected_file_by_content }.flatten.last
+ blob = results.select { |result| result.path == expected_file_by_content }.flatten.last
- expect(blob.filename).to eq(expected_file_by_content)
+ expect(blob.path).to eq(expected_file_by_content)
end
end
@@ -146,7 +146,7 @@ describe Gitlab::ProjectSearchResults do
let(:blob_type) { 'blobs' }
let(:disabled_project) { create(:project, :public, :repository, :repository_disabled) }
let(:private_project) { create(:project, :public, :repository, :repository_private) }
- let(:expected_file_by_name) { 'files/images/wm.svg' }
+ let(:expected_file_by_path) { 'files/images/wm.svg' }
let(:expected_file_by_content) { 'CHANGELOG' }
end
@@ -169,7 +169,7 @@ describe Gitlab::ProjectSearchResults do
let(:blob_type) { 'wiki_blobs' }
let(:disabled_project) { create(:project, :public, :wiki_repo, :wiki_disabled) }
let(:private_project) { create(:project, :public, :wiki_repo, :wiki_private) }
- let(:expected_file_by_name) { 'Files/Title.md' }
+ let(:expected_file_by_path) { 'Files/Title.md' }
let(:expected_file_by_content) { 'CHANGELOG.md' }
end
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index 83acd979a80..5559b1e4291 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -22,7 +22,8 @@ describe Gitlab::ProjectTemplate do
described_class.new('nfjekyll', 'Netlify/Jekyll', _('A Jekyll site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfjekyll'),
described_class.new('nfplainhtml', 'Netlify/Plain HTML', _('A plain HTML site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfplain-html'),
described_class.new('nfgitbook', 'Netlify/GitBook', _('A GitBook site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfgitbook'),
- described_class.new('nfhexo', 'Netlify/Hexo', _('A Hexo site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfhexo')
+ described_class.new('nfhexo', 'Netlify/Hexo', _('A Hexo site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfhexo'),
+ described_class.new('serverless_framework', 'Serverless Framework/JS', _('A basic page and serverless function that uses AWS Lambda, AWS API Gateway, and GitLab Pages'), 'https://gitlab.com/gitlab-org/project-templates/serverless-framework', 'illustrations/logos/serverless_framework.svg')
]
expect(described_class.all).to be_an(Array)
diff --git a/spec/lib/gitlab/prometheus/internal_spec.rb b/spec/lib/gitlab/prometheus/internal_spec.rb
new file mode 100644
index 00000000000..884bdcb4e9b
--- /dev/null
+++ b/spec/lib/gitlab/prometheus/internal_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Prometheus::Internal do
+ let(:listen_address) { 'localhost:9090' }
+
+ let(:prometheus_settings) do
+ {
+ enable: true,
+ listen_address: listen_address
+ }
+ end
+
+ before do
+ stub_config(prometheus: prometheus_settings)
+ end
+
+ describe '.uri' do
+ shared_examples 'returns valid uri' do |uri_string|
+ it do
+ expect(described_class.uri).to eq(uri_string)
+ expect { Addressable::URI.parse(described_class.uri) }.not_to raise_error
+ end
+ end
+
+ it_behaves_like 'returns valid uri', 'http://localhost:9090'
+
+ context 'with non default prometheus address' do
+ let(:listen_address) { 'https://localhost:9090' }
+
+ it_behaves_like 'returns valid uri', 'https://localhost:9090'
+
+ context 'with :9090 symbol' do
+ let(:listen_address) { :':9090' }
+
+ it_behaves_like 'returns valid uri', 'http://localhost:9090'
+ end
+
+ context 'with 0.0.0.0:9090' do
+ let(:listen_address) { '0.0.0.0:9090' }
+
+ it_behaves_like 'returns valid uri', 'http://localhost:9090'
+ end
+ end
+
+ context 'when listen_address is nil' do
+ let(:listen_address) { nil }
+
+ it 'does not fail' do
+ expect(described_class.uri).to eq(nil)
+ end
+ end
+
+ context 'when prometheus listen address is blank in gitlab.yml' do
+ let(:listen_address) { '' }
+
+ it 'does not configure prometheus' do
+ expect(described_class.uri).to eq(nil)
+ end
+ end
+ end
+
+ describe '.prometheus_enabled?' do
+ it 'returns correct value' do
+ expect(described_class.prometheus_enabled?).to eq(true)
+ end
+
+ context 'when prometheus setting is disabled in gitlab.yml' do
+ let(:prometheus_settings) do
+ {
+ enable: false,
+ listen_address: listen_address
+ }
+ end
+
+ it 'returns correct value' do
+ expect(described_class.prometheus_enabled?).to eq(false)
+ end
+ end
+
+ context 'when prometheus setting is not present in gitlab.yml' do
+ before do
+ allow(Gitlab.config).to receive(:prometheus).and_raise(Settingslogic::MissingSetting)
+ end
+
+ it 'does not fail' do
+ expect(described_class.prometheus_enabled?).to eq(false)
+ end
+ end
+ end
+
+ describe '.listen_address' do
+ it 'returns correct value' do
+ expect(described_class.listen_address).to eq(listen_address)
+ end
+
+ context 'when prometheus setting is not present in gitlab.yml' do
+ before do
+ allow(Gitlab.config).to receive(:prometheus).and_raise(Settingslogic::MissingSetting)
+ end
+
+ it 'does not fail' do
+ expect(described_class.listen_address).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/prometheus/queries/knative_invocation_query_spec.rb b/spec/lib/gitlab/prometheus/queries/knative_invocation_query_spec.rb
index 7f6283715f2..6361893c53c 100644
--- a/spec/lib/gitlab/prometheus/queries/knative_invocation_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/knative_invocation_query_spec.rb
@@ -13,14 +13,19 @@ describe Gitlab::Prometheus::Queries::KnativeInvocationQuery do
context 'verify queries' do
before do
- allow(PrometheusMetric).to receive(:find_by_identifier).and_return(create(:prometheus_metric, query: prometheus_istio_query('test-name', 'test-ns')))
- allow(client).to receive(:query_range)
+ create(:prometheus_metric,
+ :common,
+ identifier: :system_metrics_knative_function_invocation_count,
+ query: 'sum(ceil(rate(istio_requests_total{destination_service_namespace="%{kube_namespace}", destination_app=~"%{function_name}.*"}[1m])*60))')
end
it 'has the query, but no data' do
- results = subject.query(serverless_func.id)
+ expect(client).to receive(:query_range).with(
+ 'sum(ceil(rate(istio_requests_total{destination_service_namespace="test-ns", destination_app=~"test-name.*"}[1m])*60))',
+ hash_including(:start, :stop)
+ )
- expect(results.queries[0][:query_range]).to eql('floor(sum(rate(istio_revision_request_count{destination_configuration="test-name", destination_namespace="test-ns"}[1m])*30))')
+ subject.query(serverless_func.id)
end
end
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index b557baed258..1397add9f5a 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -66,6 +66,15 @@ describe Gitlab::Regex do
end
describe '.aws_account_id_regex' do
+ subject { described_class.aws_account_id_regex }
+
+ it { is_expected.to match('123456789012') }
+ it { is_expected.not_to match('12345678901') }
+ it { is_expected.not_to match('1234567890123') }
+ it { is_expected.not_to match('12345678901a') }
+ end
+
+ describe '.aws_arn_regex' do
subject { described_class.aws_arn_regex }
it { is_expected.to match('arn:aws:iam::123456789012:role/role-name') }
@@ -75,4 +84,14 @@ describe Gitlab::Regex do
it { is_expected.not_to match('123456789012') }
it { is_expected.not_to match('role/role-name') }
end
+
+ describe '.utc_date_regex' do
+ subject { described_class.utc_date_regex }
+
+ it { is_expected.to match('2019-10-20') }
+ it { is_expected.to match('1990-01-01') }
+ it { is_expected.not_to match('11-1234-90') }
+ it { is_expected.not_to match('aa-1234-cc') }
+ it { is_expected.not_to match('9/9/2018') }
+ end
end
diff --git a/spec/lib/gitlab/search/found_blob_spec.rb b/spec/lib/gitlab/search/found_blob_spec.rb
index a575f6e2f11..07842faa638 100644
--- a/spec/lib/gitlab/search/found_blob_spec.rb
+++ b/spec/lib/gitlab/search/found_blob_spec.rb
@@ -15,7 +15,6 @@ describe Gitlab::Search::FoundBlob do
is_expected.to be_an described_class
expect(subject.id).to be_nil
expect(subject.path).to eq('CHANGELOG')
- expect(subject.filename).to eq('CHANGELOG')
expect(subject.basename).to eq('CHANGELOG')
expect(subject.ref).to eq('master')
expect(subject.startline).to eq(188)
@@ -25,12 +24,12 @@ describe Gitlab::Search::FoundBlob do
it 'does not parse content if not needed' do
expect(subject).not_to receive(:parse_search_result)
expect(subject.project_id).to eq(project.id)
- expect(subject.binary_filename).to eq('CHANGELOG')
+ expect(subject.binary_path).to eq('CHANGELOG')
end
it 'parses content only once when needed' do
expect(subject).to receive(:parse_search_result).once.and_call_original
- expect(subject.filename).to eq('CHANGELOG')
+ expect(subject.path).to eq('CHANGELOG')
expect(subject.startline).to eq(188)
end
@@ -38,7 +37,7 @@ describe Gitlab::Search::FoundBlob do
let(:search_result) { "master:testdata/project::function1.yaml\x001\x00---\n" }
it 'returns a valid FoundBlob' do
- expect(subject.filename).to eq('testdata/project::function1.yaml')
+ expect(subject.path).to eq('testdata/project::function1.yaml')
expect(subject.basename).to eq('testdata/project::function1')
expect(subject.ref).to eq('master')
expect(subject.startline).to eq(1)
@@ -50,7 +49,7 @@ describe Gitlab::Search::FoundBlob do
let(:search_result) { "master:testdata/foo.txt\x001\x00blah:9:blah" }
it 'returns a valid FoundBlob' do
- expect(subject.filename).to eq('testdata/foo.txt')
+ expect(subject.path).to eq('testdata/foo.txt')
expect(subject.basename).to eq('testdata/foo')
expect(subject.ref).to eq('master')
expect(subject.startline).to eq(1)
@@ -62,7 +61,7 @@ describe Gitlab::Search::FoundBlob do
let(:search_result) { "master:testdata/foo.txt\x001\x00blah\x001\x00foo" }
it 'returns a valid FoundBlob' do
- expect(subject.filename).to eq('testdata/foo.txt')
+ expect(subject.path).to eq('testdata/foo.txt')
expect(subject.basename).to eq('testdata/foo')
expect(subject.ref).to eq('master')
expect(subject.startline).to eq(1)
@@ -74,7 +73,7 @@ describe Gitlab::Search::FoundBlob do
let(:results) { project.repository.search_files_by_content('Role models', 'master') }
it 'returns a valid FoundBlob that ends with an empty line' do
- expect(subject.filename).to eq('files/markdown/ruby-style-guide.md')
+ expect(subject.path).to eq('files/markdown/ruby-style-guide.md')
expect(subject.basename).to eq('files/markdown/ruby-style-guide')
expect(subject.ref).to eq('master')
expect(subject.startline).to eq(1)
@@ -87,7 +86,7 @@ describe Gitlab::Search::FoundBlob do
let(:results) { project.repository.search_files_by_content('файл', 'master') }
it 'returns results as UTF-8' do
- expect(subject.filename).to eq('encoding/russian.rb')
+ expect(subject.path).to eq('encoding/russian.rb')
expect(subject.basename).to eq('encoding/russian')
expect(subject.ref).to eq('master')
expect(subject.startline).to eq(1)
@@ -99,7 +98,7 @@ describe Gitlab::Search::FoundBlob do
let(:results) { project.repository.search_files_by_content('webhook', 'master') }
it 'returns results as UTF-8' do
- expect(subject.filename).to eq('encoding/テスト.txt')
+ expect(subject.path).to eq('encoding/テスト.txt')
expect(subject.basename).to eq('encoding/テスト')
expect(subject.ref).to eq('master')
expect(subject.startline).to eq(3)
@@ -111,7 +110,7 @@ describe Gitlab::Search::FoundBlob do
let(:search_result) { (+"master:encoding/iso8859.txt\x001\x00\xC4\xFC\nmaster:encoding/iso8859.txt\x002\x00\nmaster:encoding/iso8859.txt\x003\x00foo\n").force_encoding(Encoding::ASCII_8BIT) }
it 'returns results as UTF-8' do
- expect(subject.filename).to eq('encoding/iso8859.txt')
+ expect(subject.path).to eq('encoding/iso8859.txt')
expect(subject.basename).to eq('encoding/iso8859')
expect(subject.ref).to eq('master')
expect(subject.startline).to eq(1)
@@ -124,7 +123,6 @@ describe Gitlab::Search::FoundBlob do
let(:search_result) { "master:CONTRIBUTE.md\x005\x00- [Contribute to GitLab](#contribute-to-gitlab)\n" }
it { expect(subject.path).to eq('CONTRIBUTE.md') }
- it { expect(subject.filename).to eq('CONTRIBUTE.md') }
it { expect(subject.basename).to eq('CONTRIBUTE') }
end
@@ -132,7 +130,6 @@ describe Gitlab::Search::FoundBlob do
let(:search_result) { "master:a/b/c.md\x005\x00a b c\n" }
it { expect(subject.path).to eq('a/b/c.md') }
- it { expect(subject.filename).to eq('a/b/c.md') }
it { expect(subject.basename).to eq('a/b/c') }
end
end
@@ -141,7 +138,7 @@ describe Gitlab::Search::FoundBlob do
context 'when file is under directory' do
let(:path) { 'a/b/c.md' }
- subject { described_class.new(blob_filename: path, project: project, ref: 'master') }
+ subject { described_class.new(blob_path: path, project: project, ref: 'master') }
before do
allow(Gitlab::Git::Blob).to receive(:batch).and_return([
@@ -150,7 +147,6 @@ describe Gitlab::Search::FoundBlob do
end
it { expect(subject.path).to eq('a/b/c.md') }
- it { expect(subject.filename).to eq('a/b/c.md') }
it { expect(subject.basename).to eq('a/b/c') }
context 'when filename has multiple extensions' do
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index a17e9a31212..eefc548a4d9 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -310,18 +310,18 @@ describe Gitlab::Shell do
let(:disk_path) { "#{project.disk_path}.git" }
it 'returns true when the command succeeds' do
- expect(gitlab_shell.exists?(project.repository_storage, disk_path)).to be(true)
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, disk_path)).to be(true)
expect(gitlab_shell.remove_repository(project.repository_storage, project.disk_path)).to be(true)
- expect(gitlab_shell.exists?(project.repository_storage, disk_path)).to be(false)
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, disk_path)).to be(false)
end
it 'keeps the namespace directory' do
gitlab_shell.remove_repository(project.repository_storage, project.disk_path)
- expect(gitlab_shell.exists?(project.repository_storage, disk_path)).to be(false)
- expect(gitlab_shell.exists?(project.repository_storage, project.disk_path.gsub(project.name, ''))).to be(true)
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, disk_path)).to be(false)
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, project.disk_path.gsub(project.name, ''))).to be(true)
end
end
@@ -332,18 +332,18 @@ describe Gitlab::Shell do
old_path = project2.disk_path
new_path = "project/new_path"
- expect(gitlab_shell.exists?(project2.repository_storage, "#{old_path}.git")).to be(true)
- expect(gitlab_shell.exists?(project2.repository_storage, "#{new_path}.git")).to be(false)
+ expect(TestEnv.storage_dir_exists?(project2.repository_storage, "#{old_path}.git")).to be(true)
+ expect(TestEnv.storage_dir_exists?(project2.repository_storage, "#{new_path}.git")).to be(false)
expect(gitlab_shell.mv_repository(project2.repository_storage, old_path, new_path)).to be_truthy
- expect(gitlab_shell.exists?(project2.repository_storage, "#{old_path}.git")).to be(false)
- expect(gitlab_shell.exists?(project2.repository_storage, "#{new_path}.git")).to be(true)
+ expect(TestEnv.storage_dir_exists?(project2.repository_storage, "#{old_path}.git")).to be(false)
+ expect(TestEnv.storage_dir_exists?(project2.repository_storage, "#{new_path}.git")).to be(true)
end
it 'returns false when the command fails' do
expect(gitlab_shell.mv_repository(project2.repository_storage, project2.disk_path, '')).to be_falsy
- expect(gitlab_shell.exists?(project2.repository_storage, "#{project2.disk_path}.git")).to be(true)
+ expect(TestEnv.storage_dir_exists?(project2.repository_storage, "#{project2.disk_path}.git")).to be(true)
end
end
@@ -401,68 +401,48 @@ describe Gitlab::Shell do
describe '#add_namespace' do
it 'creates a namespace' do
- subject.add_namespace(storage, "mepmep")
+ Gitlab::GitalyClient::NamespaceService.allow { subject.add_namespace(storage, "mepmep") }
- expect(subject.exists?(storage, "mepmep")).to be(true)
+ expect(TestEnv.storage_dir_exists?(storage, "mepmep")).to be(true)
end
end
- describe '#exists?' do
- context 'when the namespace does not exist' do
+ describe '#repository_exists?' do
+ context 'when the repository does not exist' do
it 'returns false' do
- expect(subject.exists?(storage, "non-existing")).to be(false)
+ expect(subject.repository_exists?(storage, "non-existing.git")).to be(false)
end
end
- context 'when the namespace exists' do
+ context 'when the repository exists' do
it 'returns true' do
- subject.add_namespace(storage, "mepmep")
+ project = create(:project, :repository, :legacy_storage)
- expect(subject.exists?(storage, "mepmep")).to be(true)
+ expect(subject.repository_exists?(storage, project.repository.disk_path + ".git")).to be(true)
end
end
end
- describe '#repository_exists?' do
- context 'when the storage path does not exist' do
- subject { described_class.new.repository_exists?(storage, "non-existing.git") }
-
- it { is_expected.to be_falsey }
- end
-
- context 'when the repository does not exist' do
- let(:project) { create(:project, :repository, :legacy_storage) }
-
- subject { described_class.new.repository_exists?(storage, "#{project.repository.disk_path}-some-other-repo.git") }
-
- it { is_expected.to be_falsey }
- end
-
- context 'when the repository exists' do
- let(:project) { create(:project, :repository, :legacy_storage) }
-
- subject { described_class.new.repository_exists?(storage, "#{project.repository.disk_path}.git") }
-
- it { is_expected.to be_truthy }
- end
- end
-
describe '#remove' do
it 'removes the namespace' do
- subject.add_namespace(storage, "mepmep")
- subject.rm_namespace(storage, "mepmep")
+ Gitlab::GitalyClient::NamespaceService.allow do
+ subject.add_namespace(storage, "mepmep")
+ subject.rm_namespace(storage, "mepmep")
+ end
- expect(subject.exists?(storage, "mepmep")).to be(false)
+ expect(TestEnv.storage_dir_exists?(storage, "mepmep")).to be(false)
end
end
describe '#mv_namespace' do
it 'renames the namespace' do
- subject.add_namespace(storage, "mepmep")
- subject.mv_namespace(storage, "mepmep", "2mep")
+ Gitlab::GitalyClient::NamespaceService.allow do
+ subject.add_namespace(storage, "mepmep")
+ subject.mv_namespace(storage, "mepmep", "2mep")
+ end
- expect(subject.exists?(storage, "mepmep")).to be(false)
- expect(subject.exists?(storage, "2mep")).to be(true)
+ expect(TestEnv.storage_dir_exists?(storage, "mepmep")).to be(false)
+ expect(TestEnv.storage_dir_exists?(storage, "2mep")).to be(true)
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 46fbc069efb..cb870cc996b 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
describe Gitlab::SidekiqLogging::StructuredLogger do
describe '#call' do
diff --git a/spec/lib/gitlab/sidekiq_middleware/correlation_logger_spec.rb b/spec/lib/gitlab/sidekiq_middleware/correlation_logger_spec.rb
index 8410467ef1f..27eea963402 100644
--- a/spec/lib/gitlab/sidekiq_middleware/correlation_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/correlation_logger_spec.rb
@@ -19,7 +19,7 @@ describe Gitlab::SidekiqMiddleware::CorrelationLogger do
end
end
- it 'injects into payload the correlation id' do
+ it 'injects into payload the correlation id', :sidekiq_might_not_need_inline do
expect_any_instance_of(described_class).to receive(:call).and_call_original
expect_any_instance_of(TestWorker).to receive(:perform).with(1234) do
diff --git a/spec/lib/gitlab/sidekiq_middleware/metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/metrics_spec.rb
index 806112fcb16..0d8cff3a295 100644
--- a/spec/lib/gitlab/sidekiq_middleware/metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/metrics_spec.rb
@@ -1,69 +1,108 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
describe Gitlab::SidekiqMiddleware::Metrics do
+ let(:middleware) { described_class.new }
+ let(:concurrency_metric) { double('concurrency metric') }
+
+ let(:queue_duration_seconds) { double('queue duration seconds metric') }
+ let(:completion_seconds_metric) { double('completion seconds metric') }
+ let(:user_execution_seconds_metric) { double('user execution seconds metric') }
+ let(:failed_total_metric) { double('failed total metric') }
+ let(:retried_total_metric) { double('retried total metric') }
+ let(:running_jobs_metric) { double('running jobs metric') }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_queue_duration_seconds, anything, anything, anything).and_return(queue_duration_seconds)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_completion_seconds, anything, anything, anything).and_return(completion_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_cpu_seconds, anything, anything, anything).and_return(user_execution_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_failed_total, anything).and_return(failed_total_metric)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_retried_total, anything).and_return(retried_total_metric)
+ allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :all).and_return(running_jobs_metric)
+ allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_concurrency, anything, {}, :all).and_return(concurrency_metric)
+
+ allow(concurrency_metric).to receive(:set)
+ end
+
+ describe '#initialize' do
+ it 'sets general metrics' do
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq.options[:concurrency].to_i)
+
+ middleware
+ end
+ end
+
+ it 'ignore user execution when measured 0' do
+ allow(completion_seconds_metric).to receive(:observe)
+
+ expect(user_execution_seconds_metric).not_to receive(:observe)
+ end
+
describe '#call' do
- let(:middleware) { described_class.new }
let(:worker) { double(:worker) }
- let(:completion_seconds_metric) { double('completion seconds metric') }
- let(:user_execution_seconds_metric) { double('user execution seconds metric') }
- let(:failed_total_metric) { double('failed total metric') }
- let(:retried_total_metric) { double('retried total metric') }
- let(:running_jobs_metric) { double('running jobs metric') }
+ let(:job) { {} }
+ let(:job_status) { :done }
+ let(:labels) { { queue: :test } }
+ let(:labels_with_job_status) { { queue: :test, job_status: job_status } }
- before do
- allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_completion_seconds, anything, anything, anything).and_return(completion_seconds_metric)
- allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_cpu_seconds, anything, anything, anything).and_return(user_execution_seconds_metric)
- allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_failed_total, anything).and_return(failed_total_metric)
- allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_retried_total, anything).and_return(retried_total_metric)
- allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :livesum).and_return(running_jobs_metric)
+ let(:thread_cputime_before) { 1 }
+ let(:thread_cputime_after) { 2 }
+ let(:thread_cputime_duration) { thread_cputime_after - thread_cputime_before }
- allow(running_jobs_metric).to receive(:increment)
- end
+ let(:monotonic_time_before) { 11 }
+ let(:monotonic_time_after) { 20 }
+ let(:monotonic_time_duration) { monotonic_time_after - monotonic_time_before }
- it 'yields block' do
- allow(completion_seconds_metric).to receive(:observe)
- allow(user_execution_seconds_metric).to receive(:observe)
+ let(:queue_duration_for_job) { 0.01 }
- expect { |b| middleware.call(worker, {}, :test, &b) }.to yield_control.once
- end
-
- it 'sets metrics' do
- labels = { queue: :test }
- allow(middleware).to receive(:get_thread_cputime).and_return(1, 3)
+ before do
+ allow(middleware).to receive(:get_thread_cputime).and_return(thread_cputime_before, thread_cputime_after)
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
+ allow(Gitlab::InstrumentationHelper).to receive(:queue_duration_for_job).with(job).and_return(queue_duration_for_job)
- expect(user_execution_seconds_metric).to receive(:observe).with(labels, 2)
expect(running_jobs_metric).to receive(:increment).with(labels, 1)
expect(running_jobs_metric).to receive(:increment).with(labels, -1)
- expect(completion_seconds_metric).to receive(:observe).with(labels, kind_of(Numeric))
- middleware.call(worker, {}, :test) { nil }
+ expect(queue_duration_seconds).to receive(:observe).with(labels, queue_duration_for_job) if queue_duration_for_job
+ expect(user_execution_seconds_metric).to receive(:observe).with(labels_with_job_status, thread_cputime_duration)
+ expect(completion_seconds_metric).to receive(:observe).with(labels_with_job_status, monotonic_time_duration)
+ end
+
+ it 'yields block' do
+ expect { |b| middleware.call(worker, job, :test, &b) }.to yield_control.once
+ end
+
+ it 'sets queue specific metrics' do
+ middleware.call(worker, job, :test) { nil }
end
- it 'ignore user execution when measured 0' do
- allow(completion_seconds_metric).to receive(:observe)
- allow(middleware).to receive(:get_thread_cputime).and_return(0, 0)
+ context 'when job_duration is not available' do
+ let(:queue_duration_for_job) { nil }
- expect(user_execution_seconds_metric).not_to receive(:observe)
+ it 'does not set the queue_duration_seconds histogram' do
+ middleware.call(worker, job, :test) { nil }
+ end
end
context 'when job is retried' do
- it 'sets sidekiq_jobs_retried_total metric' do
- allow(completion_seconds_metric).to receive(:observe)
- expect(user_execution_seconds_metric).to receive(:observe)
+ let(:job) { { 'retry_count' => 1 } }
+ it 'sets sidekiq_jobs_retried_total metric' do
expect(retried_total_metric).to receive(:increment)
- middleware.call(worker, { 'retry_count' => 1 }, :test) { nil }
+ middleware.call(worker, job, :test) { nil }
end
end
context 'when error is raised' do
+ let(:job_status) { :fail }
+
it 'sets sidekiq_jobs_failed_total and reraises' do
- expect(failed_total_metric).to receive(:increment)
- expect { middleware.call(worker, {}, :test) { raise } }.to raise_error
+ expect(failed_total_metric).to receive(:increment).with(labels, 1)
+
+ expect { middleware.call(worker, job, :test) { raise StandardError, "Failed" } }.to raise_error(StandardError, "Failed")
end
end
end
diff --git a/spec/lib/gitlab/slash_commands/command_spec.rb b/spec/lib/gitlab/slash_commands/command_spec.rb
index dc412c80e68..5a8c721a634 100644
--- a/spec/lib/gitlab/slash_commands/command_spec.rb
+++ b/spec/lib/gitlab/slash_commands/command_spec.rb
@@ -115,5 +115,10 @@ describe Gitlab::SlashCommands::Command do
let(:params) { { text: 'issue move #78291 to gitlab/gitlab-ci' } }
it { is_expected.to eq(Gitlab::SlashCommands::IssueMove) }
end
+
+ context 'IssueComment is triggered' do
+ let(:params) { { text: "issue comment #503\ncomment body" } }
+ it { is_expected.to eq(Gitlab::SlashCommands::IssueComment) }
+ end
end
end
diff --git a/spec/lib/gitlab/slash_commands/issue_comment_spec.rb b/spec/lib/gitlab/slash_commands/issue_comment_spec.rb
new file mode 100644
index 00000000000..c6f56d10d1f
--- /dev/null
+++ b/spec/lib/gitlab/slash_commands/issue_comment_spec.rb
@@ -0,0 +1,117 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SlashCommands::IssueComment do
+ describe '#execute' do
+ let(:project) { create(:project, :public) }
+ let(:issue) { create(:issue, project: project) }
+ let(:user) { issue.author }
+ let(:chat_name) { double(:chat_name, user: user) }
+ let(:regex_match) { described_class.match("issue comment #{issue.iid}\nComment body") }
+
+ subject { described_class.new(project, chat_name).execute(regex_match) }
+
+ context 'when the issue exists' do
+ context 'when project is private' do
+ let(:project) { create(:project) }
+
+ context 'when the user is not a member of the project' do
+ let(:chat_name) { double(:chat_name, user: create(:user)) }
+
+ it 'does not allow the user to comment' do
+ expect(subject[:response_type]).to be(:ephemeral)
+ expect(subject[:text]).to match('not found')
+ expect(issue.reload.notes.count).to be_zero
+ end
+ end
+ end
+
+ context 'when the user is not a member of the project' do
+ let(:chat_name) { double(:chat_name, user: create(:user)) }
+
+ context 'when the discussion is locked in the issue' do
+ before do
+ issue.update!(discussion_locked: true)
+ end
+
+ it 'does not allow the user to comment' do
+ expect(subject[:response_type]).to be(:ephemeral)
+ expect(subject[:text]).to match('You are not allowed')
+ expect(issue.reload.notes.count).to be_zero
+ end
+ end
+ end
+
+ context 'when the user can comment on the issue' do
+ context 'when comment body exists' do
+ it 'creates a new comment' do
+ expect { subject }.to change { issue.notes.count }.by(1)
+ end
+
+ it 'a new comment has a correct body' do
+ subject
+
+ expect(issue.notes.last.note).to eq('Comment body')
+ end
+ end
+
+ context 'when comment body does not exist' do
+ let(:regex_match) { described_class.match("issue comment #{issue.iid}") }
+
+ it 'does not create a new comment' do
+ expect { subject }.not_to change { issue.notes.count }
+ end
+
+ it 'displays the errors' do
+ expect(subject[:response_type]).to be(:ephemeral)
+ expect(subject[:text]).to match("- Note can't be blank")
+ end
+ end
+ end
+ end
+
+ context 'when the issue does not exist' do
+ let(:regex_match) { described_class.match("issue comment 2343242\nComment body") }
+
+ it 'returns not found' do
+ expect(subject[:response_type]).to be(:ephemeral)
+ expect(subject[:text]).to match('not found')
+ end
+ end
+ end
+
+ describe '.match' do
+ subject(:match) { described_class.match(command) }
+
+ context 'when a command has an issue ID' do
+ context 'when command has a comment body' do
+ let(:command) { "issue comment 503\nComment body" }
+
+ it 'matches an issue ID' do
+ expect(match[:iid]).to eq('503')
+ end
+
+ it 'matches a note body' do
+ expect(match[:note_body]).to eq('Comment body')
+ end
+ end
+ end
+
+ context 'when a command has a reference prefix for issue ID' do
+ let(:command) { "issue comment #503\nComment body" }
+
+ it 'matches an issue ID' do
+ expect(match[:iid]).to eq('503')
+ end
+ end
+
+ context 'when a command does not have an issue ID' do
+ let(:command) { 'issue comment' }
+
+ it 'does not match' do
+ is_expected.to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/slash_commands/presenters/access_spec.rb b/spec/lib/gitlab/slash_commands/presenters/access_spec.rb
index c7b83467660..804184a7173 100644
--- a/spec/lib/gitlab/slash_commands/presenters/access_spec.rb
+++ b/spec/lib/gitlab/slash_commands/presenters/access_spec.rb
@@ -22,6 +22,16 @@ describe Gitlab::SlashCommands::Presenters::Access do
end
end
+ describe '#generic_access_denied' do
+ subject { described_class.new.generic_access_denied }
+
+ it { is_expected.to be_a(Hash) }
+
+ it_behaves_like 'displays an error message' do
+ let(:error_message) { 'You are not allowed to perform the given chatops command.' }
+ end
+ end
+
describe '#deactivated' do
subject { described_class.new.deactivated }
diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_comment_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_comment_spec.rb
new file mode 100644
index 00000000000..b5ef417cb93
--- /dev/null
+++ b/spec/lib/gitlab/slash_commands/presenters/issue_comment_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SlashCommands::Presenters::IssueComment do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:note) { create(:note, project: project, noteable: issue) }
+ let(:author) { note.author }
+
+ describe '#present' do
+ let(:attachment) { subject[:attachments].first }
+ subject { described_class.new(note).present }
+
+ it { is_expected.to be_a(Hash) }
+
+ it 'sets ephemeral response type' do
+ expect(subject[:response_type]).to be(:ephemeral)
+ end
+
+ it 'sets the title' do
+ expect(attachment[:title]).to eq("#{issue.title} · #{issue.to_reference}")
+ end
+
+ it 'sets the fallback text' do
+ expect(attachment[:fallback]).to eq("New comment on #{issue.to_reference}: #{issue.title}")
+ end
+
+ it 'sets the fields' do
+ expect(attachment[:fields]).to eq([{ title: 'Comment', value: note.note }])
+ end
+
+ it 'sets the color' do
+ expect(attachment[:color]).to eq('#38ae67')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sourcegraph_spec.rb b/spec/lib/gitlab/sourcegraph_spec.rb
new file mode 100644
index 00000000000..e081ae32175
--- /dev/null
+++ b/spec/lib/gitlab/sourcegraph_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Sourcegraph do
+ let_it_be(:user) { create(:user) }
+ let(:feature_scope) { true }
+
+ before do
+ Feature.enable(:sourcegraph, feature_scope)
+ end
+
+ describe '.feature_conditional?' do
+ subject { described_class.feature_conditional? }
+
+ context 'when feature is enabled globally' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when feature is enabled only to a resource' do
+ let(:feature_scope) { user }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '.feature_available?' do
+ subject { described_class.feature_available? }
+
+ context 'when feature is enabled globally' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when feature is enabled only to a resource' do
+ let(:feature_scope) { user }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '.feature_enabled?' do
+ let(:current_user) { nil }
+
+ subject { described_class.feature_enabled?(current_user) }
+
+ context 'when feature is enabled globally' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when feature is enabled only to a resource' do
+ let(:feature_scope) { user }
+
+ context 'for the same resource' do
+ let(:current_user) { user }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'for a different resource' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sql/recursive_cte_spec.rb b/spec/lib/gitlab/sql/recursive_cte_spec.rb
index 20e36c224b0..b15be56dd6d 100644
--- a/spec/lib/gitlab/sql/recursive_cte_spec.rb
+++ b/spec/lib/gitlab/sql/recursive_cte_spec.rb
@@ -20,7 +20,7 @@ describe Gitlab::SQL::RecursiveCTE do
[rel1.except(:order).to_sql, rel2.except(:order).to_sql]
end
- expect(sql).to eq("#{name} AS (#{sql1}\nUNION\n#{sql2})")
+ expect(sql).to eq("#{name} AS ((#{sql1})\nUNION\n(#{sql2}))")
end
end
diff --git a/spec/lib/gitlab/sql/union_spec.rb b/spec/lib/gitlab/sql/union_spec.rb
index f8f6da19fa5..f736614ae53 100644
--- a/spec/lib/gitlab/sql/union_spec.rb
+++ b/spec/lib/gitlab/sql/union_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::SQL::Union do
it 'returns a String joining relations together using a UNION' do
union = described_class.new([relation_1, relation_2])
- expect(union.to_sql).to eq("#{to_sql(relation_1)}\nUNION\n#{to_sql(relation_2)}")
+ expect(union.to_sql).to eq("(#{to_sql(relation_1)})\nUNION\n(#{to_sql(relation_2)})")
end
it 'skips Model.none segements' do
@@ -22,7 +22,7 @@ describe Gitlab::SQL::Union do
union = described_class.new([empty_relation, relation_1, relation_2])
expect {User.where("users.id IN (#{union.to_sql})").to_a}.not_to raise_error
- expect(union.to_sql).to eq("#{to_sql(relation_1)}\nUNION\n#{to_sql(relation_2)}")
+ expect(union.to_sql).to eq("(#{to_sql(relation_1)})\nUNION\n(#{to_sql(relation_2)})")
end
it 'uses UNION ALL when removing duplicates is disabled' do
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 50488dba48c..dc877f20cae 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -8,19 +8,23 @@ describe Gitlab::Tracking do
stub_application_setting(snowplow_enabled: true)
stub_application_setting(snowplow_collector_hostname: 'gitfoo.com')
stub_application_setting(snowplow_cookie_domain: '.gitfoo.com')
- stub_application_setting(snowplow_site_id: '_abc123_')
+ stub_application_setting(snowplow_app_id: '_abc123_')
+ stub_application_setting(snowplow_iglu_registry_url: 'https://example.org')
end
describe '.snowplow_options' do
it 'returns useful client options' do
- expect(described_class.snowplow_options(nil)).to eq(
+ expected_fields = {
namespace: 'gl',
hostname: 'gitfoo.com',
cookieDomain: '.gitfoo.com',
appId: '_abc123_',
formTracking: true,
- linkClickTracking: true
- )
+ linkClickTracking: true,
+ igluRegistryUrl: 'https://example.org'
+ }
+
+ expect(subject.snowplow_options(nil)).to match(expected_fields)
end
it 'enables features using feature flags' do
@@ -29,11 +33,12 @@ describe Gitlab::Tracking do
:additional_snowplow_tracking,
'_group_'
).and_return(false)
-
- expect(described_class.snowplow_options('_group_')).to include(
+ addition_feature_fields = {
formTracking: false,
linkClickTracking: false
- )
+ }
+
+ expect(subject.snowplow_options('_group_')).to include(addition_feature_fields)
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb
index 7a01f7d1de8..96ebeb8ff76 100644
--- a/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb
@@ -34,22 +34,54 @@ describe Gitlab::UsageDataCounters::WebIdeCounter, :clean_gitlab_redis_shared_st
it_behaves_like 'counter examples'
end
+ describe 'previews counter' do
+ let(:setting_enabled) { true }
+
+ before do
+ stub_application_setting(web_ide_clientside_preview_enabled: setting_enabled)
+ end
+
+ context 'when web ide clientside preview is enabled' do
+ let(:increment_counter_method) { :increment_previews_count }
+ let(:total_counter_method) { :total_previews_count }
+
+ it_behaves_like 'counter examples'
+ end
+
+ context 'when web ide clientside preview is not enabled' do
+ let(:setting_enabled) { false }
+
+ it 'does not increment the counter' do
+ expect(described_class.total_previews_count).to eq(0)
+
+ 2.times { described_class.increment_previews_count }
+
+ expect(described_class.total_previews_count).to eq(0)
+ end
+ end
+ end
+
describe '.totals' do
commits = 5
merge_requests = 3
views = 2
+ previews = 4
before do
+ stub_application_setting(web_ide_clientside_preview_enabled: true)
+
commits.times { described_class.increment_commits_count }
merge_requests.times { described_class.increment_merge_requests_count }
views.times { described_class.increment_views_count }
+ previews.times { described_class.increment_previews_count }
end
it 'can report all totals' do
expect(described_class.totals).to include(
web_ide_commits: commits,
web_ide_views: views,
- web_ide_merge_requests: merge_requests
+ web_ide_merge_requests: merge_requests,
+ web_ide_previews: previews
)
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index f2e864472c5..484684eeb65 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -17,21 +17,41 @@ describe Gitlab::UsageData do
create(:service, project: projects[0], type: 'SlackSlashCommandsService', active: true)
create(:service, project: projects[1], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'SlackService', active: true)
+ create(:service, project: projects[2], type: 'MattermostService', active: true)
+ create(:service, project: projects[2], type: 'JenkinsService', active: true)
+ create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
-
- gcp_cluster = create(:cluster, :provided_by_gcp)
- create(:cluster, :provided_by_user)
- create(:cluster, :provided_by_user, :disabled)
+ create_list(:issue, 4, project: projects[0])
+ create(:zoom_meeting, project: projects[0], issue: projects[0].issues[0], issue_status: :added)
+ create_list(:zoom_meeting, 2, project: projects[0], issue: projects[0].issues[1], issue_status: :removed)
+ create(:zoom_meeting, project: projects[0], issue: projects[0].issues[2], issue_status: :added)
+ create_list(:zoom_meeting, 2, project: projects[0], issue: projects[0].issues[2], issue_status: :removed)
+
+ # Enabled clusters
+ gcp_cluster = create(:cluster_provider_gcp, :created).cluster
+ create(:cluster_provider_aws, :created)
+ create(:cluster_platform_kubernetes)
create(:cluster, :group)
+
+ # Disabled clusters
+ create(:cluster, :disabled)
create(:cluster, :group, :disabled)
create(:cluster, :group, :disabled)
+
+ # Applications
create(:clusters_applications_helm, :installed, cluster: gcp_cluster)
create(:clusters_applications_ingress, :installed, cluster: gcp_cluster)
create(:clusters_applications_cert_manager, :installed, cluster: gcp_cluster)
create(:clusters_applications_prometheus, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_crossplane, :installed, cluster: gcp_cluster)
create(:clusters_applications_runner, :installed, cluster: gcp_cluster)
create(:clusters_applications_knative, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_elastic_stack, :installed, cluster: gcp_cluster)
+
+ create(:grafana_integration, project: projects[0], enabled: true)
+ create(:grafana_integration, project: projects[1], enabled: true)
+ create(:grafana_integration, project: projects[2], enabled: false)
ProjectFeature.first.update_attribute('repository_access_level', 0)
end
@@ -64,6 +84,8 @@ describe Gitlab::UsageData do
avg_cycle_analytics
influxdb_metrics_enabled
prometheus_metrics_enabled
+ web_ide_clientside_preview_enabled
+ ingress_modsecurity_enabled
))
end
@@ -81,6 +103,7 @@ describe Gitlab::UsageData do
web_ide_views
web_ide_commits
web_ide_merge_requests
+ web_ide_previews
navbar_searches
cycle_analytics_views
productivity_analytics_views
@@ -112,17 +135,23 @@ describe Gitlab::UsageData do
clusters_disabled
project_clusters_disabled
group_clusters_disabled
+ clusters_platforms_eks
clusters_platforms_gke
clusters_platforms_user
clusters_applications_helm
clusters_applications_ingress
clusters_applications_cert_managers
clusters_applications_prometheus
+ clusters_applications_crossplane
clusters_applications_runner
clusters_applications_knative
+ clusters_applications_elastic_stack
in_review_folder
+ grafana_integrated_projects
groups
issues
+ issues_with_associated_zoom_link
+ issues_using_zoom_quick_actions
keys
label_lists
labels
@@ -139,6 +168,9 @@ describe Gitlab::UsageData do
projects_jira_cloud_active
projects_slack_notifications_active
projects_slack_slash_active
+ projects_custom_issue_tracker_active
+ projects_jenkins_active
+ projects_mattermost_active
projects_prometheus_active
projects_with_repositories_enabled
projects_with_error_tracking_enabled
@@ -172,24 +204,33 @@ describe Gitlab::UsageData do
expect(count_data[:projects_jira_cloud_active]).to eq(2)
expect(count_data[:projects_slack_notifications_active]).to eq(2)
expect(count_data[:projects_slack_slash_active]).to eq(1)
+ expect(count_data[:projects_custom_issue_tracker_active]).to eq(1)
+ expect(count_data[:projects_jenkins_active]).to eq(1)
+ expect(count_data[:projects_mattermost_active]).to eq(1)
expect(count_data[:projects_with_repositories_enabled]).to eq(3)
expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
+ expect(count_data[:issues_with_associated_zoom_link]).to eq(2)
+ expect(count_data[:issues_using_zoom_quick_actions]).to eq(3)
- expect(count_data[:clusters_enabled]).to eq(7)
- expect(count_data[:project_clusters_enabled]).to eq(6)
+ expect(count_data[:clusters_enabled]).to eq(4)
+ expect(count_data[:project_clusters_enabled]).to eq(3)
expect(count_data[:group_clusters_enabled]).to eq(1)
expect(count_data[:clusters_disabled]).to eq(3)
expect(count_data[:project_clusters_disabled]).to eq(1)
expect(count_data[:group_clusters_disabled]).to eq(2)
expect(count_data[:group_clusters_enabled]).to eq(1)
+ expect(count_data[:clusters_platforms_eks]).to eq(1)
expect(count_data[:clusters_platforms_gke]).to eq(1)
expect(count_data[:clusters_platforms_user]).to eq(1)
expect(count_data[:clusters_applications_helm]).to eq(1)
expect(count_data[:clusters_applications_ingress]).to eq(1)
expect(count_data[:clusters_applications_cert_managers]).to eq(1)
+ expect(count_data[:clusters_applications_crossplane]).to eq(1)
expect(count_data[:clusters_applications_prometheus]).to eq(1)
expect(count_data[:clusters_applications_runner]).to eq(1)
expect(count_data[:clusters_applications_knative]).to eq(1)
+ expect(count_data[:clusters_applications_elastic_stack]).to eq(1)
+ expect(count_data[:grafana_integrated_projects]).to eq(2)
end
it 'works when queries time out' do
@@ -232,6 +273,7 @@ describe Gitlab::UsageData do
expect(subject[:container_registry_enabled]).to eq(Gitlab.config.registry.enabled)
expect(subject[:dependency_proxy_enabled]).to eq(Gitlab.config.dependency_proxy.enabled)
expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
+ expect(subject[:web_ide_clientside_preview_enabled]).to eq(Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?)
end
end
diff --git a/spec/lib/gitlab/user_access_spec.rb b/spec/lib/gitlab/user_access_spec.rb
index c25bd14fcba..4e7c43a6856 100644
--- a/spec/lib/gitlab/user_access_spec.rb
+++ b/spec/lib/gitlab/user_access_spec.rb
@@ -148,7 +148,7 @@ describe Gitlab::UserAccess do
)
end
- it 'allows users that have push access to the canonical project to push to the MR branch' do
+ it 'allows users that have push access to the canonical project to push to the MR branch', :sidekiq_might_not_need_inline do
canonical_project.add_developer(user)
expect(access.can_push_to_branch?('awesome-feature')).to be_truthy
diff --git a/spec/lib/gitlab/utils/deep_size_spec.rb b/spec/lib/gitlab/utils/deep_size_spec.rb
index 47dfc04f46f..ccd202b33f7 100644
--- a/spec/lib/gitlab/utils/deep_size_spec.rb
+++ b/spec/lib/gitlab/utils/deep_size_spec.rb
@@ -42,4 +42,10 @@ describe Gitlab::Utils::DeepSize do
end
end
end
+
+ describe '.human_default_max_size' do
+ it 'returns 1 MB' do
+ expect(described_class.human_default_max_size).to eq('1 MB')
+ end
+ end
end
diff --git a/spec/lib/gitlab/visibility_level_checker_spec.rb b/spec/lib/gitlab/visibility_level_checker_spec.rb
index 325ac3c6f31..fc929d5cbbf 100644
--- a/spec/lib/gitlab/visibility_level_checker_spec.rb
+++ b/spec/lib/gitlab/visibility_level_checker_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::VisibilityLevelChecker do
diff --git a/spec/lib/gitlab/wiki_file_finder_spec.rb b/spec/lib/gitlab/wiki_file_finder_spec.rb
index fdd95d5e6e6..aeba081f3d3 100644
--- a/spec/lib/gitlab/wiki_file_finder_spec.rb
+++ b/spec/lib/gitlab/wiki_file_finder_spec.rb
@@ -15,7 +15,7 @@ describe Gitlab::WikiFileFinder do
it_behaves_like 'file finder' do
subject { described_class.new(project, project.wiki.default_branch) }
- let(:expected_file_by_name) { 'Files/Title.md' }
+ let(:expected_file_by_path) { 'Files/Title.md' }
let(:expected_file_by_content) { 'CHANGELOG.md' }
end
end
diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb
index 6bf837f1d3f..9362ff72fbc 100644
--- a/spec/lib/gitlab_spec.rb
+++ b/spec/lib/gitlab_spec.rb
@@ -96,6 +96,48 @@ describe Gitlab do
end
end
+ describe '.canary?' do
+ it 'is true when CANARY env var is set to true' do
+ stub_env('CANARY', '1')
+
+ expect(described_class.canary?).to eq true
+ end
+
+ it 'is false when CANARY env var is set to false' do
+ stub_env('CANARY', '0')
+
+ expect(described_class.canary?).to eq false
+ end
+ end
+
+ describe '.com_and_canary?' do
+ it 'is true when on .com and canary' do
+ allow(described_class).to receive_messages(com?: true, canary?: true)
+
+ expect(described_class.com_and_canary?).to eq true
+ end
+
+ it 'is false when on .com but not on canary' do
+ allow(described_class).to receive_messages(com?: true, canary?: false)
+
+ expect(described_class.com_and_canary?).to eq false
+ end
+ end
+
+ describe '.com_but_not_canary?' do
+ it 'is false when on .com and canary' do
+ allow(described_class).to receive_messages(com?: true, canary?: true)
+
+ expect(described_class.com_but_not_canary?).to eq false
+ end
+
+ it 'is true when on .com but not on canary' do
+ allow(described_class).to receive_messages(com?: true, canary?: false)
+
+ expect(described_class.com_but_not_canary?).to eq true
+ end
+ end
+
describe '.dev_env_org_or_com?' do
it 'is true when on .com' do
allow(described_class).to receive_messages(com?: true, org?: false)
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index 0f7f57095df..473ad639ead 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -104,7 +104,8 @@ describe GoogleApi::CloudPlatform::Client do
enabled: legacy_abac
},
ip_allocation_policy: {
- use_ip_aliases: true
+ use_ip_aliases: true,
+ cluster_ipv4_cidr_block: '/16'
},
addons_config: addons_config
}
diff --git a/spec/lib/grafana/client_spec.rb b/spec/lib/grafana/client_spec.rb
index bd93a3c59a2..699344e940e 100644
--- a/spec/lib/grafana/client_spec.rb
+++ b/spec/lib/grafana/client_spec.rb
@@ -35,7 +35,7 @@ describe Grafana::Client do
it 'does not follow redirects' do
expect { subject }.to raise_exception(
Grafana::Client::Error,
- 'Grafana response status code: 302'
+ 'Grafana response status code: 302, Message: {}'
)
expect(redirect_req_stub).to have_been_requested
@@ -67,6 +67,30 @@ describe Grafana::Client do
end
end
+ describe '#get_dashboard' do
+ let(:grafana_api_url) { 'https://grafanatest.com/-/grafana-project/api/dashboards/uid/FndfgnX' }
+
+ subject do
+ client.get_dashboard(uid: 'FndfgnX')
+ end
+
+ it_behaves_like 'calls grafana api'
+ it_behaves_like 'no redirects'
+ it_behaves_like 'handles exceptions'
+ end
+
+ describe '#get_datasource' do
+ let(:grafana_api_url) { 'https://grafanatest.com/-/grafana-project/api/datasources/name/Test%20Name' }
+
+ subject do
+ client.get_datasource(name: 'Test Name')
+ end
+
+ it_behaves_like 'calls grafana api'
+ it_behaves_like 'no redirects'
+ it_behaves_like 'handles exceptions'
+ end
+
describe '#proxy_datasource' do
let(:grafana_api_url) do
'https://grafanatest.com/-/grafana-project/' \
diff --git a/spec/lib/omni_auth/strategies/saml_spec.rb b/spec/lib/omni_auth/strategies/saml_spec.rb
index 3c59de86d98..73e86872308 100644
--- a/spec/lib/omni_auth/strategies/saml_spec.rb
+++ b/spec/lib/omni_auth/strategies/saml_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe OmniAuth::Strategies::SAML, type: :strategy do
diff --git a/spec/lib/prometheus/pid_provider_spec.rb b/spec/lib/prometheus/pid_provider_spec.rb
index ba843b27254..6fdc11b14c4 100644
--- a/spec/lib/prometheus/pid_provider_spec.rb
+++ b/spec/lib/prometheus/pid_provider_spec.rb
@@ -18,7 +18,17 @@ describe Prometheus::PidProvider do
expect(Sidekiq).to receive(:server?).and_return(true)
end
- it { is_expected.to eq 'sidekiq' }
+ context 'in a clustered setup' do
+ before do
+ stub_env('SIDEKIQ_WORKER_ID', '123')
+ end
+
+ it { is_expected.to eq 'sidekiq_123' }
+ end
+
+ context 'in a single process setup' do
+ it { is_expected.to eq 'sidekiq' }
+ end
end
context 'when running in Unicorn mode' do
diff --git a/spec/lib/quality/helm_client_spec.rb b/spec/lib/quality/helm_client_spec.rb
index 7abb9688d5a..da5ba4c4d99 100644
--- a/spec/lib/quality/helm_client_spec.rb
+++ b/spec/lib/quality/helm_client_spec.rb
@@ -107,5 +107,25 @@ RSpec.describe Quality::HelmClient do
expect(subject.delete(release_name: release_name)).to eq('')
end
+
+ context 'with multiple release names' do
+ let(:release_name) { ['my-release', 'my-release-2'] }
+
+ it 'raises an error if the Helm command fails' do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with([%(helm delete --tiller-namespace "#{namespace}" --purge #{release_name.join(' ')})])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
+
+ expect { subject.delete(release_name: release_name) }.to raise_error(described_class::CommandFailedError)
+ end
+
+ it 'calls helm delete with multiple release names' do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with([%(helm delete --tiller-namespace "#{namespace}" --purge #{release_name.join(' ')})])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+
+ expect(subject.delete(release_name: release_name)).to eq('')
+ end
+ end
end
end
diff --git a/spec/lib/quality/kubernetes_client_spec.rb b/spec/lib/quality/kubernetes_client_spec.rb
index 4e77dcc97e6..5bac102ac41 100644
--- a/spec/lib/quality/kubernetes_client_spec.rb
+++ b/spec/lib/quality/kubernetes_client_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Quality::KubernetesClient do
expect(Gitlab::Popen).to receive(:popen_with_detail)
.with([%(kubectl --namespace "#{namespace}" delete ) \
'ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa ' \
- "--now --ignore-not-found --include-uninitialized -l release=\"#{release_name}\""])
+ "--now --ignore-not-found --include-uninitialized --wait=true -l release=\"#{release_name}\""])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
expect { subject.cleanup(release_name: release_name) }.to raise_error(described_class::CommandFailedError)
@@ -23,11 +23,59 @@ RSpec.describe Quality::KubernetesClient do
expect(Gitlab::Popen).to receive(:popen_with_detail)
.with([%(kubectl --namespace "#{namespace}" delete ) \
'ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa ' \
- "--now --ignore-not-found --include-uninitialized -l release=\"#{release_name}\""])
+ "--now --ignore-not-found --include-uninitialized --wait=true -l release=\"#{release_name}\""])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
# We're not verifying the output here, just silencing it
expect { subject.cleanup(release_name: release_name) }.to output.to_stdout
end
+
+ context 'with multiple releases' do
+ let(:release_name) { ['my-release', 'my-release-2'] }
+
+ it 'raises an error if the Kubernetes command fails' do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with([%(kubectl --namespace "#{namespace}" delete ) \
+ 'ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa ' \
+ "--now --ignore-not-found --include-uninitialized --wait=true -l 'release in (#{release_name.join(', ')})'"])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
+
+ expect { subject.cleanup(release_name: release_name) }.to raise_error(described_class::CommandFailedError)
+ end
+
+ it 'calls kubectl with the correct arguments' do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with([%(kubectl --namespace "#{namespace}" delete ) \
+ 'ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa ' \
+ "--now --ignore-not-found --include-uninitialized --wait=true -l 'release in (#{release_name.join(', ')})'"])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+
+ # We're not verifying the output here, just silencing it
+ expect { subject.cleanup(release_name: release_name) }.to output.to_stdout
+ end
+ end
+
+ context 'with `wait: false`' do
+ it 'raises an error if the Kubernetes command fails' do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with([%(kubectl --namespace "#{namespace}" delete ) \
+ 'ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa ' \
+ "--now --ignore-not-found --include-uninitialized --wait=false -l release=\"#{release_name}\""])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
+
+ expect { subject.cleanup(release_name: release_name, wait: false) }.to raise_error(described_class::CommandFailedError)
+ end
+
+ it 'calls kubectl with the correct arguments' do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with([%(kubectl --namespace "#{namespace}" delete ) \
+ 'ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa ' \
+ "--now --ignore-not-found --include-uninitialized --wait=false -l release=\"#{release_name}\""])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+
+ # We're not verifying the output here, just silencing it
+ expect { subject.cleanup(release_name: release_name, wait: false) }.to output.to_stdout
+ end
+ end
end
end
diff --git a/spec/lib/sentry/client_spec.rb b/spec/lib/sentry/client_spec.rb
index ca2b17b44e0..8101664d34f 100644
--- a/spec/lib/sentry/client_spec.rb
+++ b/spec/lib/sentry/client_spec.rb
@@ -192,6 +192,15 @@ describe Sentry::Client do
end
end
+ context 'sentry api response too large' do
+ it 'raises exception' do
+ deep_size = double('Gitlab::Utils::DeepSize', valid?: false)
+ allow(Gitlab::Utils::DeepSize).to receive(:new).with(sentry_api_response).and_return(deep_size)
+
+ expect { subject }.to raise_error(Sentry::Client::ResponseInvalidSizeError, 'Sentry API response is too big. Limit is 1 MB.')
+ end
+ end
+
it_behaves_like 'maps exceptions'
end
diff --git a/spec/mailers/abuse_report_mailer_spec.rb b/spec/mailers/abuse_report_mailer_spec.rb
index 86153071cd3..fcbffb52849 100644
--- a/spec/mailers/abuse_report_mailer_spec.rb
+++ b/spec/mailers/abuse_report_mailer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe AbuseReportMailer do
diff --git a/spec/mailers/emails/merge_requests_spec.rb b/spec/mailers/emails/merge_requests_spec.rb
index 2ad572bb5c7..541acc47172 100644
--- a/spec/mailers/emails/merge_requests_spec.rb
+++ b/spec/mailers/emails/merge_requests_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'email_spec'
diff --git a/spec/mailers/emails/pages_domains_spec.rb b/spec/mailers/emails/pages_domains_spec.rb
index c52e3c2191d..e360e38256e 100644
--- a/spec/mailers/emails/pages_domains_spec.rb
+++ b/spec/mailers/emails/pages_domains_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'email_spec'
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index 1f7be415e35..d340f207dc7 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'email_spec'
diff --git a/spec/mailers/emails/releases_spec.rb b/spec/mailers/emails/releases_spec.rb
index 19f404db2a6..c614c009434 100644
--- a/spec/mailers/emails/releases_spec.rb
+++ b/spec/mailers/emails/releases_spec.rb
@@ -18,6 +18,7 @@ describe Emails::Releases do
context 'when the release has a name' do
it 'shows the correct subject' do
+ release.name = 'beta-1'
expected_subject = "#{release.project.name} | New release: #{release.name} - #{release.tag}"
is_expected.to have_subject(expected_subject)
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 1991bac0229..cafb96898b3 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'email_spec'
diff --git a/spec/mailers/repository_check_mailer_spec.rb b/spec/mailers/repository_check_mailer_spec.rb
index 757d3dfa797..1fd4d28ca53 100644
--- a/spec/mailers/repository_check_mailer_spec.rb
+++ b/spec/mailers/repository_check_mailer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe RepositoryCheckMailer do
diff --git a/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb b/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
index 5c6f213e15b..f4155eab1bf 100644
--- a/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
+++ b/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180122154930_schedule_set_confidential_note_events_on_services.rb')
@@ -30,7 +32,7 @@ describe ScheduleSetConfidentialNoteEventsOnServices, :migration, :sidekiq do
end
end
- it 'correctly processes services' do
+ it 'correctly processes services', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
expect(services_table.where(confidential_note_events: nil).count).to eq 4
expect(services_table.where(confidential_note_events: true).count).to eq 1
diff --git a/spec/migrations/active_record/schema_spec.rb b/spec/migrations/active_record/schema_spec.rb
index bc246f88685..617e31f359b 100644
--- a/spec/migrations/active_record/schema_spec.rb
+++ b/spec/migrations/active_record/schema_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
# Check consistency of db/schema.rb version, migrations' timestamps, and the latest migration timestamp
diff --git a/spec/migrations/add_default_and_free_plans_spec.rb b/spec/migrations/add_default_and_free_plans_spec.rb
new file mode 100644
index 00000000000..ae40b5b10c2
--- /dev/null
+++ b/spec/migrations/add_default_and_free_plans_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20191023152913_add_default_and_free_plans.rb')
+
+describe AddDefaultAndFreePlans, :migration do
+ describe 'migrate' do
+ let(:plans) { table(:plans) }
+
+ context 'when on Gitlab.com' do
+ before do
+ expect(Gitlab).to receive(:com?) { true }
+ end
+
+ it 'creates free and default plans' do
+ expect { migrate! }.to change { plans.count }.by 2
+
+ expect(plans.last(2).pluck(:name)).to eq %w[free default]
+ end
+ end
+
+ context 'when on self-hosted' do
+ before do
+ expect(Gitlab).to receive(:com?) { false }
+ end
+
+ it 'creates only a default plan' do
+ expect { migrate! }.to change { plans.count }.by 1
+
+ expect(plans.last.name).to eq 'default'
+ end
+ end
+ end
+end
diff --git a/spec/migrations/add_foreign_keys_to_todos_spec.rb b/spec/migrations/add_foreign_keys_to_todos_spec.rb
index 2500e2f8333..9932113a003 100644
--- a/spec/migrations/add_foreign_keys_to_todos_spec.rb
+++ b/spec/migrations/add_foreign_keys_to_todos_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180201110056_add_foreign_keys_to_todos.rb')
diff --git a/spec/migrations/add_not_null_constraint_to_project_mirror_data_foreign_key_spec.rb b/spec/migrations/add_not_null_constraint_to_project_mirror_data_foreign_key_spec.rb
index 6fd3cb1f44e..24ae939afa7 100644
--- a/spec/migrations/add_not_null_constraint_to_project_mirror_data_foreign_key_spec.rb
+++ b/spec/migrations/add_not_null_constraint_to_project_mirror_data_foreign_key_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180508100222_add_not_null_constraint_to_project_mirror_data_foreign_key.rb')
diff --git a/spec/migrations/add_pages_access_level_to_project_feature_spec.rb b/spec/migrations/add_pages_access_level_to_project_feature_spec.rb
index 3946602c5be..a5e2bf2de71 100644
--- a/spec/migrations/add_pages_access_level_to_project_feature_spec.rb
+++ b/spec/migrations/add_pages_access_level_to_project_feature_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180423204600_add_pages_access_level_to_project_feature.rb')
diff --git a/spec/migrations/add_pipeline_build_foreign_key_spec.rb b/spec/migrations/add_pipeline_build_foreign_key_spec.rb
index e9413f52f19..bb40ead9b93 100644
--- a/spec/migrations/add_pipeline_build_foreign_key_spec.rb
+++ b/spec/migrations/add_pipeline_build_foreign_key_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180420010016_add_pipeline_build_foreign_key.rb')
diff --git a/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb b/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb
index bf299b70a29..8b128ff5ab8 100644
--- a/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb
+++ b/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180511174224_add_unique_constraint_to_project_features_project_id.rb')
diff --git a/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb b/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb
index b8c3a3eda4e..ae53b4e6443 100644
--- a/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb
+++ b/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180425131009_assure_commits_count_for_merge_request_diff.rb')
diff --git a/spec/migrations/backfill_store_project_full_path_in_repo_spec.rb b/spec/migrations/backfill_store_project_full_path_in_repo_spec.rb
index 65a918d5440..913b4d3f114 100644
--- a/spec/migrations/backfill_store_project_full_path_in_repo_spec.rb
+++ b/spec/migrations/backfill_store_project_full_path_in_repo_spec.rb
@@ -20,7 +20,7 @@ describe BackfillStoreProjectFullPathInRepo, :migration do
describe '#up' do
shared_examples_for 'writes the full path to git config' do
- it 'writes the git config' do
+ it 'writes the git config', :sidekiq_might_not_need_inline do
expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service|
allow(repository_service).to receive(:cleanup)
expect(repository_service).to receive(:set_config).with('gitlab.fullpath' => expected_path)
@@ -29,7 +29,7 @@ describe BackfillStoreProjectFullPathInRepo, :migration do
migration.up
end
- it 'retries in case of failure' do
+ it 'retries in case of failure', :sidekiq_might_not_need_inline do
repository_service = spy(:repository_service)
allow(Gitlab::GitalyClient::RepositoryService).to receive(:new).and_return(repository_service)
@@ -40,7 +40,7 @@ describe BackfillStoreProjectFullPathInRepo, :migration do
migration.up
end
- it 'cleans up repository before writing the config' do
+ it 'cleans up repository before writing the config', :sidekiq_might_not_need_inline do
expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service|
expect(repository_service).to receive(:cleanup).ordered
expect(repository_service).to receive(:set_config).ordered
@@ -87,7 +87,7 @@ describe BackfillStoreProjectFullPathInRepo, :migration do
context 'project in group' do
let!(:project) { projects.create!(namespace_id: group.id, name: 'baz', path: 'baz') }
- it 'deletes the gitlab full config value' do
+ it 'deletes the gitlab full config value', :sidekiq_might_not_need_inline do
expect_any_instance_of(Gitlab::GitalyClient::RepositoryService)
.to receive(:delete_config).with(['gitlab.fullpath'])
diff --git a/spec/migrations/change_default_value_for_dsa_key_restriction_spec.rb b/spec/migrations/change_default_value_for_dsa_key_restriction_spec.rb
index 7e61ab9b52e..699708ad1d4 100644
--- a/spec/migrations/change_default_value_for_dsa_key_restriction_spec.rb
+++ b/spec/migrations/change_default_value_for_dsa_key_restriction_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180531220618_change_default_value_for_dsa_key_restriction.rb')
diff --git a/spec/migrations/cleanup_build_stage_migration_spec.rb b/spec/migrations/cleanup_build_stage_migration_spec.rb
index 4d4d02aaa94..532212810c8 100644
--- a/spec/migrations/cleanup_build_stage_migration_spec.rb
+++ b/spec/migrations/cleanup_build_stage_migration_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180420010616_cleanup_build_stage_migration.rb')
diff --git a/spec/migrations/cleanup_environments_external_url_spec.rb b/spec/migrations/cleanup_environments_external_url_spec.rb
index 07ddaf3d38f..bc20f936593 100644
--- a/spec/migrations/cleanup_environments_external_url_spec.rb
+++ b/spec/migrations/cleanup_environments_external_url_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20181108091549_cleanup_environments_external_url.rb')
diff --git a/spec/migrations/cleanup_stages_position_migration_spec.rb b/spec/migrations/cleanup_stages_position_migration_spec.rb
index dde5a777487..649fda1bb4e 100644
--- a/spec/migrations/cleanup_stages_position_migration_spec.rb
+++ b/spec/migrations/cleanup_stages_position_migration_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180604123514_cleanup_stages_position_migration.rb')
diff --git a/spec/migrations/create_missing_namespace_for_internal_users_spec.rb b/spec/migrations/create_missing_namespace_for_internal_users_spec.rb
index 3fd4c5bc8d6..5df08a74e56 100644
--- a/spec/migrations/create_missing_namespace_for_internal_users_spec.rb
+++ b/spec/migrations/create_missing_namespace_for_internal_users_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180413022611_create_missing_namespace_for_internal_users.rb')
diff --git a/spec/migrations/drop_duplicate_protected_tags_spec.rb b/spec/migrations/drop_duplicate_protected_tags_spec.rb
index acfb6850722..7f0c7efbf66 100644
--- a/spec/migrations/drop_duplicate_protected_tags_spec.rb
+++ b/spec/migrations/drop_duplicate_protected_tags_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180711103851_drop_duplicate_protected_tags.rb')
diff --git a/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb b/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb
index abf39317188..327fb09ffec 100644
--- a/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb
+++ b/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180216121030_enqueue_verify_pages_domain_workers')
diff --git a/spec/migrations/fill_empty_finished_at_in_deployments_spec.rb b/spec/migrations/fill_empty_finished_at_in_deployments_spec.rb
index cf5c10f77e1..50ecf083f27 100644
--- a/spec/migrations/fill_empty_finished_at_in_deployments_spec.rb
+++ b/spec/migrations/fill_empty_finished_at_in_deployments_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181030135124_fill_empty_finished_at_in_deployments')
diff --git a/spec/migrations/fill_file_store_spec.rb b/spec/migrations/fill_file_store_spec.rb
index 5ff7aa56ce2..806c9283634 100644
--- a/spec/migrations/fill_file_store_spec.rb
+++ b/spec/migrations/fill_file_store_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180424151928_fill_file_store')
@@ -21,7 +23,7 @@ describe FillFileStore, :migration do
uploads.create!(size: 10, path: 'path', uploader: 'uploader', mount_point: 'file_name', store: nil)
end
- it 'correctly migrates nullified file_store/store column' do
+ it 'correctly migrates nullified file_store/store column', :sidekiq_might_not_need_inline do
expect(job_artifacts.where(file_store: nil).count).to eq(1)
expect(lfs_objects.where(file_store: nil).count).to eq(1)
expect(uploads.where(store: nil).count).to eq(1)
diff --git a/spec/migrations/fill_productivity_analytics_start_date_spec.rb b/spec/migrations/fill_productivity_analytics_start_date_spec.rb
new file mode 100644
index 00000000000..7cbba9ef20e
--- /dev/null
+++ b/spec/migrations/fill_productivity_analytics_start_date_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20191004081520_fill_productivity_analytics_start_date.rb')
+
+describe FillProductivityAnalyticsStartDate, :migration do
+ let(:settings_table) { table('application_settings') }
+ let(:metrics_table) { table('merge_request_metrics') }
+
+ before do
+ settings_table.create!
+ end
+
+ context 'with NO productivity analytics data available' do
+ it 'sets start_date to NOW' do
+ expect { migrate! }.to change {
+ settings_table.first&.productivity_analytics_start_date
+ }.to(be_like_time(Time.now))
+ end
+ end
+
+ context 'with productivity analytics data available' do
+ before do
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.connection.execute('ALTER TABLE merge_request_metrics DISABLE TRIGGER ALL')
+ metrics_table.create!(merged_at: Time.parse('2019-09-09'), commits_count: nil, merge_request_id: 3)
+ metrics_table.create!(merged_at: Time.parse('2019-10-10'), commits_count: 5, merge_request_id: 1)
+ metrics_table.create!(merged_at: Time.parse('2019-11-11'), commits_count: 10, merge_request_id: 2)
+ ActiveRecord::Base.connection.execute('ALTER TABLE merge_request_metrics ENABLE TRIGGER ALL')
+ end
+ end
+
+ it 'set start_date to earliest merged_at value with PA data available' do
+ expect { migrate! }.to change {
+ settings_table.first&.productivity_analytics_start_date
+ }.to(be_like_time(Time.parse('2019-10-10')))
+ end
+ end
+end
diff --git a/spec/migrations/fix_wrong_pages_access_level_spec.rb b/spec/migrations/fix_wrong_pages_access_level_spec.rb
index 75ac5d919b2..73d8218b95c 100644
--- a/spec/migrations/fix_wrong_pages_access_level_spec.rb
+++ b/spec/migrations/fix_wrong_pages_access_level_spec.rb
@@ -1,7 +1,9 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190703185326_fix_wrong_pages_access_level.rb')
-describe FixWrongPagesAccessLevel, :migration, :sidekiq, schema: 20190628185004 do
+describe FixWrongPagesAccessLevel, :migration, :sidekiq_might_not_need_inline, schema: 20190628185004 do
using RSpec::Parameterized::TableSyntax
let(:migration_class) { described_class::MIGRATION }
diff --git a/spec/migrations/generate_lets_encrypt_private_key_spec.rb b/spec/migrations/generate_lets_encrypt_private_key_spec.rb
index 773bf5222f0..7746ba46446 100644
--- a/spec/migrations/generate_lets_encrypt_private_key_spec.rb
+++ b/spec/migrations/generate_lets_encrypt_private_key_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190524062810_generate_lets_encrypt_private_key.rb')
diff --git a/spec/migrations/generate_missing_routes_spec.rb b/spec/migrations/generate_missing_routes_spec.rb
index 30ad135d4df..a4a25951ff0 100644
--- a/spec/migrations/generate_missing_routes_spec.rb
+++ b/spec/migrations/generate_missing_routes_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180702134423_generate_missing_routes.rb')
diff --git a/spec/migrations/migrate_cluster_configure_worker_sidekiq_queue_spec.rb b/spec/migrations/migrate_cluster_configure_worker_sidekiq_queue_spec.rb
index a1f243651b5..4e7438fc182 100644
--- a/spec/migrations/migrate_cluster_configure_worker_sidekiq_queue_spec.rb
+++ b/spec/migrations/migrate_cluster_configure_worker_sidekiq_queue_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181219145520_migrate_cluster_configure_worker_sidekiq_queue.rb')
diff --git a/spec/migrations/migrate_create_trace_artifact_sidekiq_queue_spec.rb b/spec/migrations/migrate_create_trace_artifact_sidekiq_queue_spec.rb
index 66555118a43..d54aac50dc8 100644
--- a/spec/migrations/migrate_create_trace_artifact_sidekiq_queue_spec.rb
+++ b/spec/migrations/migrate_create_trace_artifact_sidekiq_queue_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180306074045_migrate_create_trace_artifact_sidekiq_queue.rb')
diff --git a/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb b/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
index df82672f254..98bbe0ed5a2 100644
--- a/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
+++ b/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180816161409_migrate_legacy_artifacts_to_job_artifacts.rb')
@@ -40,7 +42,7 @@ describe MigrateLegacyArtifactsToJobArtifacts, :migration, :sidekiq do
end
end
- it 'migrates legacy artifacts to ci_job_artifacts table' do
+ it 'migrates legacy artifacts to ci_job_artifacts table', :sidekiq_might_not_need_inline do
migrate!
expect(job_artifacts.order(:job_id, :file_type).pluck('project_id, job_id, file_type, file_store, size, expire_at, file, file_sha256, file_location'))
diff --git a/spec/migrations/migrate_object_storage_upload_sidekiq_queue_spec.rb b/spec/migrations/migrate_object_storage_upload_sidekiq_queue_spec.rb
index 6ce04805e5d..6a188f34854 100644
--- a/spec/migrations/migrate_object_storage_upload_sidekiq_queue_spec.rb
+++ b/spec/migrations/migrate_object_storage_upload_sidekiq_queue_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180603190921_migrate_object_storage_upload_sidekiq_queue.rb')
diff --git a/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb b/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb
index 94de208e53e..d8f39ce4e71 100644
--- a/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb
+++ b/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190124200344_migrate_storage_migrator_sidekiq_queue.rb')
diff --git a/spec/migrations/migrate_update_head_pipeline_for_merge_request_sidekiq_queue_spec.rb b/spec/migrations/migrate_update_head_pipeline_for_merge_request_sidekiq_queue_spec.rb
index 976f3ce07d7..e517eef1320 100644
--- a/spec/migrations/migrate_update_head_pipeline_for_merge_request_sidekiq_queue_spec.rb
+++ b/spec/migrations/migrate_update_head_pipeline_for_merge_request_sidekiq_queue_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180307012445_migrate_update_head_pipeline_for_merge_request_sidekiq_queue.rb')
diff --git a/spec/migrations/move_limits_from_plans_spec.rb b/spec/migrations/move_limits_from_plans_spec.rb
new file mode 100644
index 00000000000..693d6ecb2c1
--- /dev/null
+++ b/spec/migrations/move_limits_from_plans_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20191030152934_move_limits_from_plans.rb')
+
+describe MoveLimitsFromPlans, :migration do
+ let(:plans) { table(:plans) }
+ let(:plan_limits) { table(:plan_limits) }
+
+ let!(:early_adopter_plan) { plans.create(name: 'early_adopter', title: 'Early adopter', active_pipelines_limit: 10, pipeline_size_limit: 11, active_jobs_limit: 12) }
+ let!(:gold_plan) { plans.create(name: 'gold', title: 'Gold', active_pipelines_limit: 20, pipeline_size_limit: 21, active_jobs_limit: 22) }
+ let!(:silver_plan) { plans.create(name: 'silver', title: 'Silver', active_pipelines_limit: 30, pipeline_size_limit: 31, active_jobs_limit: 32) }
+ let!(:bronze_plan) { plans.create(name: 'bronze', title: 'Bronze', active_pipelines_limit: 40, pipeline_size_limit: 41, active_jobs_limit: 42) }
+ let!(:free_plan) { plans.create(name: 'free', title: 'Free', active_pipelines_limit: 50, pipeline_size_limit: 51, active_jobs_limit: 52) }
+ let!(:other_plan) { plans.create(name: 'other', title: 'Other', active_pipelines_limit: nil, pipeline_size_limit: nil, active_jobs_limit: 0) }
+
+ describe 'migrate' do
+ it 'populates plan_limits from all the records in plans' do
+ expect { migrate! }.to change { plan_limits.count }.by 6
+ end
+
+ it 'copies plan limits and plan.id into to plan_limits table' do
+ migrate!
+
+ new_data = plan_limits.pluck(:plan_id, :ci_active_pipelines, :ci_pipeline_size, :ci_active_jobs)
+ expected_data = [
+ [early_adopter_plan.id, 10, 11, 12],
+ [gold_plan.id, 20, 21, 22],
+ [silver_plan.id, 30, 31, 32],
+ [bronze_plan.id, 40, 41, 42],
+ [free_plan.id, 50, 51, 52],
+ [other_plan.id, 0, 0, 0]
+ ]
+ expect(new_data).to contain_exactly(*expected_data)
+ end
+ end
+end
diff --git a/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb b/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb
index 441c4295a40..ad1bcf37732 100644
--- a/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb
+++ b/spec/migrations/remove_empty_extern_uid_auth0_identities_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180220150310_remove_empty_extern_uid_auth0_identities.rb')
diff --git a/spec/migrations/remove_empty_github_service_templates_spec.rb b/spec/migrations/remove_empty_github_service_templates_spec.rb
new file mode 100644
index 00000000000..c128c8538db
--- /dev/null
+++ b/spec/migrations/remove_empty_github_service_templates_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20191021101942_remove_empty_github_service_templates.rb')
+
+describe RemoveEmptyGithubServiceTemplates, :migration do
+ subject(:migration) { described_class.new }
+
+ let(:services) do
+ table(:services).tap do |klass|
+ klass.class_eval do
+ serialize :properties, JSON
+ end
+ end
+ end
+
+ before do
+ services.delete_all
+
+ create_service(properties: nil)
+ create_service(properties: {})
+ create_service(properties: { some: :value })
+ create_service(properties: {}, template: false)
+ create_service(properties: {}, type: 'SomeType')
+ end
+
+ def all_service_properties
+ services.where(template: true, type: 'GithubService').pluck(:properties)
+ end
+
+ it 'correctly migrates up and down service templates' do
+ reversible_migration do |migration|
+ migration.before -> do
+ expect(services.count).to eq(5)
+
+ expect(all_service_properties)
+ .to match(a_collection_containing_exactly(nil, {}, { 'some' => 'value' }))
+ end
+
+ migration.after -> do
+ expect(services.count).to eq(4)
+
+ expect(all_service_properties)
+ .to match(a_collection_containing_exactly(nil, { 'some' => 'value' }))
+ end
+ end
+ end
+
+ def create_service(params)
+ data = { template: true, type: 'GithubService' }
+ data.merge!(params)
+
+ services.create!(data)
+ end
+end
diff --git a/spec/migrations/remove_redundant_pipeline_stages_spec.rb b/spec/migrations/remove_redundant_pipeline_stages_spec.rb
index 8325f986594..ad905d7eb8a 100644
--- a/spec/migrations/remove_redundant_pipeline_stages_spec.rb
+++ b/spec/migrations/remove_redundant_pipeline_stages_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180119121225_remove_redundant_pipeline_stages.rb')
diff --git a/spec/migrations/reschedule_builds_stages_migration_spec.rb b/spec/migrations/reschedule_builds_stages_migration_spec.rb
index 3bfd9dd9f6b..f9707d8f90b 100644
--- a/spec/migrations/reschedule_builds_stages_migration_spec.rb
+++ b/spec/migrations/reschedule_builds_stages_migration_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180405101928_reschedule_builds_stages_migration')
diff --git a/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb b/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb
index 26489ef58bd..a62650c44fb 100644
--- a/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb
+++ b/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180309121820_reschedule_commits_count_for_merge_request_diff')
diff --git a/spec/migrations/schedule_digest_personal_access_tokens_spec.rb b/spec/migrations/schedule_digest_personal_access_tokens_spec.rb
index 6d155f78342..ff859d07ff2 100644
--- a/spec/migrations/schedule_digest_personal_access_tokens_spec.rb
+++ b/spec/migrations/schedule_digest_personal_access_tokens_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180913142237_schedule_digest_personal_access_tokens.rb')
@@ -32,7 +34,7 @@ describe ScheduleDigestPersonalAccessTokens, :migration, :sidekiq do
end
end
- it 'schedules background migrations' do
+ it 'schedules background migrations', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
plain_text_token = 'token IS NOT NULL'
diff --git a/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb b/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
index 54f3e264df0..a0241f1d20c 100644
--- a/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
+++ b/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190524073827_schedule_fill_valid_time_for_pages_domain_certificates.rb')
@@ -32,7 +34,7 @@ describe ScheduleFillValidTimeForPagesDomainCertificates, :migration, :sidekiq d
end
end
- it 'sets certificate valid_not_before/not_after' do
+ it 'sets certificate valid_not_before/not_after', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
migrate!
diff --git a/spec/migrations/schedule_runners_token_encryption_spec.rb b/spec/migrations/schedule_runners_token_encryption_spec.rb
index 97ff6c128f3..6b9538c4d17 100644
--- a/spec/migrations/schedule_runners_token_encryption_spec.rb
+++ b/spec/migrations/schedule_runners_token_encryption_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181121111200_schedule_runners_token_encryption')
diff --git a/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb b/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
index fa4ddd5fbc7..845b0515177 100644
--- a/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
+++ b/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180104131052_schedule_set_confidential_note_events_on_webhooks.rb')
@@ -30,7 +32,7 @@ describe ScheduleSetConfidentialNoteEventsOnWebhooks, :migration, :sidekiq do
end
end
- it 'correctly processes web hooks' do
+ it 'correctly processes web hooks', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
expect(web_hooks_table.where(confidential_note_events: nil).count).to eq 4
expect(web_hooks_table.where(confidential_note_events: true).count).to eq 1
diff --git a/spec/migrations/schedule_stages_index_migration_spec.rb b/spec/migrations/schedule_stages_index_migration_spec.rb
index 710264da375..9ebc648f9d8 100644
--- a/spec/migrations/schedule_stages_index_migration_spec.rb
+++ b/spec/migrations/schedule_stages_index_migration_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180420080616_schedule_stages_index_migration')
diff --git a/spec/migrations/schedule_sync_issuables_state_id_spec.rb b/spec/migrations/schedule_sync_issuables_state_id_spec.rb
index bc94f8820bd..4f841e8ce04 100644
--- a/spec/migrations/schedule_sync_issuables_state_id_spec.rb
+++ b/spec/migrations/schedule_sync_issuables_state_id_spec.rb
@@ -33,7 +33,7 @@ describe ScheduleSyncIssuablesStateId, :migration, :sidekiq do
describe '#up' do
context 'issues' do
- it 'migrates state column to integer' do
+ it 'migrates state column to integer', :sidekiq_might_not_need_inline do
opened_issue = issues.create!(description: 'first', state: 'opened')
closed_issue = issues.create!(description: 'second', state: 'closed')
invalid_state_issue = issues.create!(description: 'fourth', state: 'not valid')
@@ -55,7 +55,7 @@ describe ScheduleSyncIssuablesStateId, :migration, :sidekiq do
end
context 'merge requests' do
- it 'migrates state column to integer' do
+ it 'migrates state column to integer', :sidekiq_might_not_need_inline do
opened_merge_request = merge_requests.create!(state: 'opened', target_project_id: project.id, target_branch: 'feature1', source_branch: 'master')
closed_merge_request = merge_requests.create!(state: 'closed', target_project_id: project.id, target_branch: 'feature2', source_branch: 'master')
merged_merge_request = merge_requests.create!(state: 'merged', target_project_id: project.id, target_branch: 'feature3', source_branch: 'master')
diff --git a/spec/migrations/schedule_to_archive_legacy_traces_spec.rb b/spec/migrations/schedule_to_archive_legacy_traces_spec.rb
index d3eac3c45ea..a81fb1494c7 100644
--- a/spec/migrations/schedule_to_archive_legacy_traces_spec.rb
+++ b/spec/migrations/schedule_to_archive_legacy_traces_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180529152628_schedule_to_archive_legacy_traces')
@@ -23,7 +25,7 @@ describe ScheduleToArchiveLegacyTraces, :migration do
create_legacy_trace(@build_running, 'This job is not done yet')
end
- it 'correctly archive legacy traces' do
+ it 'correctly archive legacy traces', :sidekiq_might_not_need_inline do
expect(job_artifacts.count).to eq(0)
expect(File.exist?(legacy_trace_path(@build_success))).to be_truthy
expect(File.exist?(legacy_trace_path(@build_failed))).to be_truthy
diff --git a/spec/migrations/truncate_user_fullname_spec.rb b/spec/migrations/truncate_user_fullname_spec.rb
index 17fd4d9f688..65b870de7b8 100644
--- a/spec/migrations/truncate_user_fullname_spec.rb
+++ b/spec/migrations/truncate_user_fullname_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190325080727_truncate_user_fullname.rb')
diff --git a/spec/models/analytics/cycle_analytics/project_stage_spec.rb b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
index 83d6ff754c5..9d18618f638 100644
--- a/spec/models/analytics/cycle_analytics/project_stage_spec.rb
+++ b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
@@ -16,8 +16,16 @@ describe Analytics::CycleAnalytics::ProjectStage do
end
end
- it_behaves_like "cycle analytics stage" do
+ it_behaves_like 'cycle analytics stage' do
let(:parent) { create(:project) }
let(:parent_name) { :project }
end
+
+ context 'relative positioning' do
+ it_behaves_like 'a class that supports relative positioning' do
+ let(:project) { create(:project) }
+ let(:factory) { :cycle_analytics_project_stage }
+ let(:default_params) { { project: project } }
+ end
+ end
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 7bef3d30064..ba3b99f4421 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe ApplicationSetting do
+ using RSpec::Parameterized::TableSyntax
+
subject(:setting) { described_class.create_from_defaults }
it { include(CacheableAttributes) }
@@ -64,6 +66,24 @@ describe ApplicationSetting do
it { is_expected.not_to allow_value('three').for(:push_event_activities_limit) }
it { is_expected.not_to allow_value(nil).for(:push_event_activities_limit) }
+ context 'when snowplow is enabled' do
+ before do
+ setting.snowplow_enabled = true
+ end
+
+ it { is_expected.not_to allow_value(nil).for(:snowplow_collector_hostname) }
+ it { is_expected.to allow_value("snowplow.gitlab.com").for(:snowplow_collector_hostname) }
+ it { is_expected.not_to allow_value('/example').for(:snowplow_collector_hostname) }
+ it { is_expected.to allow_value('https://example.org').for(:snowplow_iglu_registry_url) }
+ it { is_expected.not_to allow_value('not-a-url').for(:snowplow_iglu_registry_url) }
+ it { is_expected.to allow_value(nil).for(:snowplow_iglu_registry_url) }
+ end
+
+ context 'when snowplow is not enabled' do
+ it { is_expected.to allow_value(nil).for(:snowplow_collector_hostname) }
+ it { is_expected.to allow_value(nil).for(:snowplow_iglu_registry_url) }
+ end
+
context "when user accepted let's encrypt terms of service" do
before do
setting.update(lets_encrypt_terms_of_service_accepted: true)
@@ -72,6 +92,37 @@ describe ApplicationSetting do
it { is_expected.not_to allow_value(nil).for(:lets_encrypt_notification_email) }
end
+ describe 'EKS integration' do
+ before do
+ setting.eks_integration_enabled = eks_enabled
+ end
+
+ context 'integration is disabled' do
+ let(:eks_enabled) { false }
+
+ it { is_expected.to allow_value(nil).for(:eks_account_id) }
+ it { is_expected.to allow_value(nil).for(:eks_access_key_id) }
+ it { is_expected.to allow_value(nil).for(:eks_secret_access_key) }
+ end
+
+ context 'integration is enabled' do
+ let(:eks_enabled) { true }
+
+ it { is_expected.to allow_value('123456789012').for(:eks_account_id) }
+ it { is_expected.not_to allow_value(nil).for(:eks_account_id) }
+ it { is_expected.not_to allow_value('123').for(:eks_account_id) }
+ it { is_expected.not_to allow_value('12345678901a').for(:eks_account_id) }
+
+ it { is_expected.to allow_value('access-key-id-12').for(:eks_access_key_id) }
+ it { is_expected.not_to allow_value('a' * 129).for(:eks_access_key_id) }
+ it { is_expected.not_to allow_value('short-key').for(:eks_access_key_id) }
+ it { is_expected.not_to allow_value(nil).for(:eks_access_key_id) }
+
+ it { is_expected.to allow_value('secret-access-key').for(:eks_secret_access_key) }
+ it { is_expected.not_to allow_value(nil).for(:eks_secret_access_key) }
+ end
+ end
+
describe 'default_artifacts_expire_in' do
it 'sets an error if it cannot parse' do
setting.update(default_artifacts_expire_in: 'a')
@@ -446,6 +497,15 @@ describe ApplicationSetting do
it { is_expected.not_to allow_value(nil).for(:static_objects_external_storage_auth_token) }
end
end
+
+ context 'sourcegraph settings' do
+ it 'is invalid if sourcegraph is enabled and no url is provided' do
+ allow(subject).to receive(:sourcegraph_enabled).and_return(true)
+
+ expect(subject.sourcegraph_url).to be_nil
+ is_expected.to be_invalid
+ end
+ end
end
context 'restrict creating duplicates' do
@@ -534,5 +594,24 @@ describe ApplicationSetting do
end
end
+ describe '#sourcegraph_url_is_com?' do
+ where(:url, :is_com) do
+ 'https://sourcegraph.com' | true
+ 'https://sourcegraph.com/' | true
+ 'https://www.sourcegraph.com' | true
+ 'shttps://www.sourcegraph.com' | false
+ 'https://sourcegraph.example.com/' | false
+ 'https://sourcegraph.org/' | false
+ end
+
+ with_them do
+ it 'matches the url with sourcegraph.com' do
+ setting.sourcegraph_url = url
+
+ expect(setting.sourcegraph_url_is_com?).to eq(is_com)
+ end
+ end
+ end
+
it_behaves_like 'application settings examples'
end
diff --git a/spec/models/aws/role_spec.rb b/spec/models/aws/role_spec.rb
index c40752e40a6..d4165567146 100644
--- a/spec/models/aws/role_spec.rb
+++ b/spec/models/aws/role_spec.rb
@@ -31,4 +31,56 @@ describe Aws::Role do
end
end
end
+
+ describe 'callbacks' do
+ describe '#ensure_role_external_id!' do
+ subject { role.validate }
+
+ context 'for a new record' do
+ let(:role) { build(:aws_role, role_external_id: nil) }
+
+ it 'calls #ensure_role_external_id!' do
+ expect(role).to receive(:ensure_role_external_id!)
+
+ subject
+ end
+ end
+
+ context 'for an existing record' do
+ let(:role) { create(:aws_role) }
+
+ it 'does not call #ensure_role_external_id!' do
+ expect(role).not_to receive(:ensure_role_external_id!)
+
+ subject
+ end
+ end
+ end
+ end
+
+ describe '#ensure_role_external_id!' do
+ let(:role) { build(:aws_role, role_external_id: external_id) }
+
+ subject { role.ensure_role_external_id! }
+
+ context 'role_external_id is blank' do
+ let(:external_id) { nil }
+
+ it 'generates an external ID and assigns it to the record' do
+ subject
+
+ expect(role.role_external_id).to be_present
+ end
+ end
+
+ context 'role_external_id is already set' do
+ let(:external_id) { 'external-id' }
+
+ it 'does not change the existing external id' do
+ subject
+
+ expect(role.role_external_id).to eq external_id
+ end
+ end
+ end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 058305bc04e..24fa3b9b1ea 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -206,6 +206,35 @@ describe Ci::Build do
end
end
+ describe '.with_exposed_artifacts' do
+ subject { described_class.with_exposed_artifacts }
+
+ let!(:job1) { create(:ci_build) }
+ let!(:job2) { create(:ci_build, options: options) }
+ let!(:job3) { create(:ci_build) }
+
+ context 'when some jobs have exposed artifacts and some not' do
+ let(:options) { { artifacts: { expose_as: 'test', paths: ['test'] } } }
+
+ before do
+ job1.ensure_metadata.update!(has_exposed_artifacts: nil)
+ job3.ensure_metadata.update!(has_exposed_artifacts: false)
+ end
+
+ it 'selects only the jobs with exposed artifacts' do
+ is_expected.to eq([job2])
+ end
+ end
+
+ context 'when job does not expose artifacts' do
+ let(:options) { nil }
+
+ it 'returns an empty array' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
describe '.with_reports' do
subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
@@ -1558,7 +1587,7 @@ describe Ci::Build do
end
end
- describe '#retries_max' do
+ describe '#options_retry_max' do
context 'with retries max config option' do
subject { create(:ci_build, options: { retry: { max: 1 } }) }
@@ -1568,7 +1597,7 @@ describe Ci::Build do
end
it 'returns the number of configured max retries' do
- expect(subject.retries_max).to eq 1
+ expect(subject.options_retry_max).to eq 1
end
end
@@ -1578,7 +1607,7 @@ describe Ci::Build do
end
it 'returns the number of configured max retries' do
- expect(subject.retries_max).to eq 1
+ expect(subject.options_retry_max).to eq 1
end
end
end
@@ -1586,16 +1615,16 @@ describe Ci::Build do
context 'without retries max config option' do
subject { create(:ci_build) }
- it 'returns zero' do
- expect(subject.retries_max).to eq 0
+ it 'returns nil' do
+ expect(subject.options_retry_max).to be_nil
end
end
context 'when build is degenerated' do
subject { create(:ci_build, :degenerated) }
- it 'returns zero' do
- expect(subject.retries_max).to eq 0
+ it 'returns nil' do
+ expect(subject.options_retry_max).to be_nil
end
end
@@ -1603,17 +1632,17 @@ describe Ci::Build do
subject { create(:ci_build, options: { retry: 1 }) }
it 'returns the number of configured max retries' do
- expect(subject.retries_max).to eq 1
+ expect(subject.options_retry_max).to eq 1
end
end
end
- describe '#retry_when' do
+ describe '#options_retry_when' do
context 'with retries when config option' do
subject { create(:ci_build, options: { retry: { when: ['some_reason'] } }) }
it 'returns the configured when' do
- expect(subject.retry_when).to eq ['some_reason']
+ expect(subject.options_retry_when).to eq ['some_reason']
end
end
@@ -1621,7 +1650,7 @@ describe Ci::Build do
subject { create(:ci_build) }
it 'returns always array' do
- expect(subject.retry_when).to eq ['always']
+ expect(subject.options_retry_when).to eq ['always']
end
end
@@ -1629,72 +1658,38 @@ describe Ci::Build do
subject { create(:ci_build, options: { retry: 1 }) }
it 'returns always array' do
- expect(subject.retry_when).to eq ['always']
+ expect(subject.options_retry_when).to eq ['always']
end
end
end
describe '#retry_failure?' do
- subject { create(:ci_build) }
+ using RSpec::Parameterized::TableSyntax
- context 'when retries max is zero' do
- before do
- expect(subject).to receive(:retries_max).at_least(:once).and_return(0)
- end
+ let(:build) { create(:ci_build) }
- it 'returns false' do
- expect(subject.retry_failure?).to eq false
- end
- end
+ subject { build.retry_failure? }
- context 'when retries max equals retries count' do
- before do
- expect(subject).to receive(:retries_max).at_least(:once).and_return(1)
- expect(subject).to receive(:retries_count).at_least(:once).and_return(1)
- end
-
- it 'returns false' do
- expect(subject.retry_failure?).to eq false
- end
+ where(:description, :retry_count, :options, :failure_reason, :result) do
+ "retries are disabled" | 0 | { max: 0 } | nil | false
+ "max equals count" | 2 | { max: 2 } | nil | false
+ "max is higher than count" | 1 | { max: 2 } | nil | true
+ "matching failure reason" | 0 | { when: %w[api_failure], max: 2 } | :api_failure | true
+ "not matching with always" | 0 | { when: %w[always], max: 2 } | :api_failure | true
+ "not matching reason" | 0 | { when: %w[script_error], max: 2 } | :api_failure | false
+ "scheduler failure override" | 1 | { when: %w[scheduler_failure], max: 1 } | :scheduler_failure | false
+ "default for scheduler failure" | 1 | {} | :scheduler_failure | true
end
- context 'when retries max is higher than retries count' do
+ with_them do
before do
- expect(subject).to receive(:retries_max).at_least(:once).and_return(2)
- expect(subject).to receive(:retries_count).at_least(:once).and_return(1)
- end
+ allow(build).to receive(:retries_count) { retry_count }
- context 'and retry when is always' do
- before do
- expect(subject).to receive(:retry_when).at_least(:once).and_return(['always'])
- end
-
- it 'returns true' do
- expect(subject.retry_failure?).to eq true
- end
- end
-
- context 'and retry when includes the failure_reason' do
- before do
- expect(subject).to receive(:failure_reason).at_least(:once).and_return('some_reason')
- expect(subject).to receive(:retry_when).at_least(:once).and_return(['some_reason'])
- end
-
- it 'returns true' do
- expect(subject.retry_failure?).to eq true
- end
+ build.options[:retry] = options
+ build.failure_reason = failure_reason
end
- context 'and retry when does not include failure_reason' do
- before do
- expect(subject).to receive(:failure_reason).at_least(:once).and_return('some_reason')
- expect(subject).to receive(:retry_when).at_least(:once).and_return(['some', 'other failure'])
- end
-
- it 'returns false' do
- expect(subject.retry_failure?).to eq false
- end
- end
+ it { is_expected.to eq(result) }
end
end
end
@@ -1844,6 +1839,14 @@ describe Ci::Build do
expect(build.metadata.read_attribute(:config_options)).to be_nil
end
end
+
+ context 'when options include artifacts:expose_as' do
+ let(:build) { create(:ci_build, options: { artifacts: { expose_as: 'test' } }) }
+
+ it 'saves the presence of expose_as into build metadata' do
+ expect(build.metadata).to have_exposed_artifacts
+ end
+ end
end
describe '#other_manual_actions' do
@@ -2218,7 +2221,7 @@ describe Ci::Build do
{ key: 'CI_PAGES_URL', value: project.pages_url, public: true, masked: false },
{ key: 'CI_API_V4_URL', value: 'http://localhost/api/v4', public: true, masked: false },
{ key: 'CI_PIPELINE_IID', value: pipeline.iid.to_s, public: true, masked: false },
- { key: 'CI_CONFIG_PATH', value: pipeline.ci_yaml_file_path, public: true, masked: false },
+ { key: 'CI_CONFIG_PATH', value: pipeline.config_path, public: true, masked: false },
{ key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true, masked: false },
{ key: 'CI_COMMIT_MESSAGE', value: pipeline.git_commit_message, public: true, masked: false },
{ key: 'CI_COMMIT_TITLE', value: pipeline.git_commit_title, public: true, masked: false },
@@ -2664,11 +2667,17 @@ describe Ci::Build do
it { is_expected.to include(deployment_variable) }
end
+ context 'when project has default CI config path' do
+ let(:ci_config_path) { { key: 'CI_CONFIG_PATH', value: '.gitlab-ci.yml', public: true, masked: false } }
+
+ it { is_expected.to include(ci_config_path) }
+ end
+
context 'when project has custom CI config path' do
let(:ci_config_path) { { key: 'CI_CONFIG_PATH', value: 'custom', public: true, masked: false } }
before do
- project.update(ci_config_path: 'custom')
+ expect_any_instance_of(Project).to receive(:ci_config_path) { 'custom' }
end
it { is_expected.to include(ci_config_path) }
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index 59db347582b..96d81f4cc49 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
@@ -63,7 +65,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :redis }
before do
- build_trace_chunk.send(:unsafe_set_data!, 'Sample data in redis')
+ build_trace_chunk.send(:unsafe_set_data!, +'Sample data in redis')
end
it { is_expected.to eq('Sample data in redis') }
@@ -71,7 +73,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when data_store is database' do
let(:data_store) { :database }
- let(:raw_data) { 'Sample data in database' }
+ let(:raw_data) { +'Sample data in database' }
it { is_expected.to eq('Sample data in database') }
end
@@ -80,7 +82,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :fog }
before do
- build_trace_chunk.send(:unsafe_set_data!, 'Sample data in fog')
+ build_trace_chunk.send(:unsafe_set_data!, +'Sample data in fog')
end
it { is_expected.to eq('Sample data in fog') }
@@ -90,7 +92,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
describe '#append' do
subject { build_trace_chunk.append(new_data, offset) }
- let(:new_data) { 'Sample new data' }
+ let(:new_data) { +'Sample new data' }
let(:offset) { 0 }
let(:merged_data) { data + new_data.to_s }
@@ -143,7 +145,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when new_data is empty' do
- let(:new_data) { '' }
+ let(:new_data) { +'' }
it 'does not append' do
subject
@@ -172,7 +174,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
shared_examples_for 'Scheduling sidekiq worker to flush data to persist store' do
context 'when new data fulfilled chunk size' do
- let(:new_data) { 'a' * described_class::CHUNK_SIZE }
+ let(:new_data) { +'a' * described_class::CHUNK_SIZE }
it 'schedules trace chunk flush worker' do
expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
@@ -180,7 +182,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
subject
end
- it 'migrates data to object storage' do
+ it 'migrates data to object storage', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
subject
@@ -194,7 +196,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
shared_examples_for 'Scheduling no sidekiq worker' do
context 'when new data fulfilled chunk size' do
- let(:new_data) { 'a' * described_class::CHUNK_SIZE }
+ let(:new_data) { +'a' * described_class::CHUNK_SIZE }
it 'does not schedule trace chunk flush worker' do
expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
@@ -219,7 +221,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :redis }
context 'when there are no data' do
- let(:data) { '' }
+ let(:data) { +'' }
it 'has no data' do
expect(build_trace_chunk.data).to be_empty
@@ -230,7 +232,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when there are some data' do
- let(:data) { 'Sample data in redis' }
+ let(:data) { +'Sample data in redis' }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
@@ -249,7 +251,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :database }
context 'when there are no data' do
- let(:data) { '' }
+ let(:data) { +'' }
it 'has no data' do
expect(build_trace_chunk.data).to be_empty
@@ -260,7 +262,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when there are some data' do
- let(:raw_data) { 'Sample data in database' }
+ let(:raw_data) { +'Sample data in database' }
let(:data) { raw_data }
it 'has data' do
@@ -276,7 +278,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :fog }
context 'when there are no data' do
- let(:data) { '' }
+ let(:data) { +'' }
it 'has no data' do
expect(build_trace_chunk.data).to be_empty
@@ -287,7 +289,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when there are some data' do
- let(:data) { 'Sample data in fog' }
+ let(:data) { +'Sample data in fog' }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
@@ -332,7 +334,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when data_store is redis' do
let(:data_store) { :redis }
- let(:data) { 'Sample data in redis' }
+ let(:data) { +'Sample data in redis' }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
@@ -343,7 +345,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when data_store is database' do
let(:data_store) { :database }
- let(:raw_data) { 'Sample data in database' }
+ let(:raw_data) { +'Sample data in database' }
let(:data) { raw_data }
it_behaves_like 'truncates'
@@ -351,7 +353,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when data_store is fog' do
let(:data_store) { :fog }
- let(:data) { 'Sample data in fog' }
+ let(:data) { +'Sample data in fog' }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
@@ -368,7 +370,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :redis }
context 'when data exists' do
- let(:data) { 'Sample data in redis' }
+ let(:data) { +'Sample data in redis' }
before do
build_trace_chunk.send(:unsafe_set_data!, data)
@@ -386,7 +388,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :database }
context 'when data exists' do
- let(:raw_data) { 'Sample data in database' }
+ let(:raw_data) { +'Sample data in database' }
let(:data) { raw_data }
it { is_expected.to eq(data.bytesize) }
@@ -401,7 +403,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :fog }
context 'when data exists' do
- let(:data) { 'Sample data in fog' }
+ let(:data) { +'Sample data in fog' }
let(:key) { "tmp/builds/#{build.id}/chunks/#{chunk_index}.log" }
before do
@@ -443,7 +445,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when data size reached CHUNK_SIZE' do
- let(:data) { 'a' * described_class::CHUNK_SIZE }
+ let(:data) { +'a' * described_class::CHUNK_SIZE }
it 'persists the data' do
expect(build_trace_chunk.redis?).to be_truthy
@@ -463,7 +465,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when data size has not reached CHUNK_SIZE' do
- let(:data) { 'Sample data in redis' }
+ let(:data) { +'Sample data in redis' }
it 'does not persist the data and the orignal data is intact' do
expect { subject }.to raise_error(described_class::FailedToPersistDataError)
@@ -492,7 +494,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when data size reached CHUNK_SIZE' do
- let(:data) { 'a' * described_class::CHUNK_SIZE }
+ let(:data) { +'a' * described_class::CHUNK_SIZE }
it 'persists the data' do
expect(build_trace_chunk.database?).to be_truthy
@@ -512,7 +514,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when data size has not reached CHUNK_SIZE' do
- let(:data) { 'Sample data in database' }
+ let(:data) { +'Sample data in database' }
it 'does not persist the data and the orignal data is intact' do
expect { subject }.to raise_error(described_class::FailedToPersistDataError)
@@ -561,7 +563,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when data size has not reached CHUNK_SIZE' do
- let(:data) { 'Sample data in fog' }
+ let(:data) { +'Sample data in fog' }
it 'does not raise error' do
expect { subject }.not_to raise_error
@@ -582,7 +584,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
shared_examples_for 'deletes all build_trace_chunk and data in redis' do
- it do
+ it 'deletes all build_trace_chunk and data in redis', :sidekiq_might_not_need_inline do
Gitlab::Redis::SharedState.with do |redis|
expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3)
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index de0ce9932e8..d24cf3d2115 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -979,141 +979,6 @@ describe Ci::Pipeline, :mailer do
end
describe 'pipeline stages' do
- describe '#stage_seeds' do
- let(:pipeline) { build(:ci_pipeline, config: config) }
- let(:config) { { rspec: { script: 'rake' } } }
-
- it 'returns preseeded stage seeds object' do
- expect(pipeline.stage_seeds)
- .to all(be_a Gitlab::Ci::Pipeline::Seed::Base)
- expect(pipeline.stage_seeds.count).to eq 1
- end
-
- context 'when no refs policy is specified' do
- let(:config) do
- { production: { stage: 'deploy', script: 'cap prod' },
- rspec: { stage: 'test', script: 'rspec' },
- spinach: { stage: 'test', script: 'spinach' } }
- end
-
- it 'correctly fabricates a stage seeds object' do
- seeds = pipeline.stage_seeds
-
- expect(seeds.size).to eq 2
- expect(seeds.first.attributes[:name]).to eq 'test'
- expect(seeds.second.attributes[:name]).to eq 'deploy'
- expect(seeds.dig(0, 0, :name)).to eq 'rspec'
- expect(seeds.dig(0, 1, :name)).to eq 'spinach'
- expect(seeds.dig(1, 0, :name)).to eq 'production'
- end
- end
-
- context 'when refs policy is specified' do
- let(:pipeline) do
- build(:ci_pipeline, ref: 'feature', tag: true, config: config)
- end
-
- let(:config) do
- { production: { stage: 'deploy', script: 'cap prod', only: ['master'] },
- spinach: { stage: 'test', script: 'spinach', only: ['tags'] } }
- end
-
- it 'returns stage seeds only assigned to master to master' do
- seeds = pipeline.stage_seeds
-
- expect(seeds.size).to eq 1
- expect(seeds.first.attributes[:name]).to eq 'test'
- expect(seeds.dig(0, 0, :name)).to eq 'spinach'
- end
- end
-
- context 'when source policy is specified' do
- let(:pipeline) { build(:ci_pipeline, source: :schedule, config: config) }
-
- let(:config) do
- { production: { stage: 'deploy', script: 'cap prod', only: ['triggers'] },
- spinach: { stage: 'test', script: 'spinach', only: ['schedules'] } }
- end
-
- it 'returns stage seeds only assigned to schedules' do
- seeds = pipeline.stage_seeds
-
- expect(seeds.size).to eq 1
- expect(seeds.first.attributes[:name]).to eq 'test'
- expect(seeds.dig(0, 0, :name)).to eq 'spinach'
- end
- end
-
- context 'when kubernetes policy is specified' do
- let(:config) do
- {
- spinach: { stage: 'test', script: 'spinach' },
- production: {
- stage: 'deploy',
- script: 'cap',
- only: { kubernetes: 'active' }
- }
- }
- end
-
- context 'when kubernetes is active' do
- context 'when user configured kubernetes from CI/CD > Clusters' do
- let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
- let(:pipeline) { build(:ci_pipeline, project: project, config: config) }
-
- it 'returns seeds for kubernetes dependent job' do
- seeds = pipeline.stage_seeds
-
- expect(seeds.size).to eq 2
- expect(seeds.dig(0, 0, :name)).to eq 'spinach'
- expect(seeds.dig(1, 0, :name)).to eq 'production'
- end
- end
- end
-
- context 'when kubernetes is not active' do
- it 'does not return seeds for kubernetes dependent job' do
- seeds = pipeline.stage_seeds
-
- expect(seeds.size).to eq 1
- expect(seeds.dig(0, 0, :name)).to eq 'spinach'
- end
- end
- end
-
- context 'when variables policy is specified' do
- let(:config) do
- { unit: { script: 'minitest', only: { variables: ['$CI_PIPELINE_SOURCE'] } },
- feature: { script: 'spinach', only: { variables: ['$UNDEFINED'] } } }
- end
-
- it 'returns stage seeds only when variables expression is truthy' do
- seeds = pipeline.stage_seeds
-
- expect(seeds.size).to eq 1
- expect(seeds.dig(0, 0, :name)).to eq 'unit'
- end
- end
- end
-
- describe '#seeds_size' do
- context 'when refs policy is specified' do
- let(:config) do
- { production: { stage: 'deploy', script: 'cap prod', only: ['master'] },
- spinach: { stage: 'test', script: 'spinach', only: ['tags'] } }
- end
-
- let(:pipeline) do
- build(:ci_pipeline, ref: 'feature', tag: true, config: config)
- end
-
- it 'returns real seeds size' do
- expect(pipeline.seeds_size).to eq 1
- end
- end
- end
-
describe 'legacy stages' do
before do
create(:commit_status, pipeline: pipeline,
@@ -1346,7 +1211,7 @@ describe Ci::Pipeline, :mailer do
end
end
- describe '#duration' do
+ describe '#duration', :sidekiq_might_not_need_inline do
context 'when multiple builds are finished' do
before do
travel_to(current + 30) do
@@ -1422,7 +1287,7 @@ describe Ci::Pipeline, :mailer do
end
describe '#finished_at' do
- it 'updates on transitioning to success' do
+ it 'updates on transitioning to success', :sidekiq_might_not_need_inline do
build.success
expect(pipeline.reload.finished_at).not_to be_nil
@@ -2102,7 +1967,7 @@ describe Ci::Pipeline, :mailer do
it { is_expected.not_to include('created', 'preparing', 'pending') }
end
- describe '#status' do
+ describe '#status', :sidekiq_might_not_need_inline do
let(:build) do
create(:ci_build, :created, pipeline: pipeline, name: 'test')
end
@@ -2186,161 +2051,6 @@ describe Ci::Pipeline, :mailer do
end
end
- describe '#ci_yaml_file_path' do
- subject { pipeline.ci_yaml_file_path }
-
- %i[unknown_source repository_source].each do |source|
- context source.to_s do
- before do
- pipeline.config_source = described_class.config_sources.fetch(source)
- end
-
- it 'returns the path from project' do
- allow(pipeline.project).to receive(:ci_config_path) { 'custom/path' }
-
- is_expected.to eq('custom/path')
- end
-
- it 'returns default when custom path is nil' do
- allow(pipeline.project).to receive(:ci_config_path) { nil }
-
- is_expected.to eq('.gitlab-ci.yml')
- end
-
- it 'returns default when custom path is empty' do
- allow(pipeline.project).to receive(:ci_config_path) { '' }
-
- is_expected.to eq('.gitlab-ci.yml')
- end
- end
- end
-
- context 'when pipeline is for auto-devops' do
- before do
- pipeline.config_source = 'auto_devops_source'
- end
-
- it 'does not return config file' do
- is_expected.to be_nil
- end
- end
- end
-
- describe '#set_config_source' do
- context 'when pipelines does not contain needed data and auto devops is disabled' do
- before do
- stub_application_setting(auto_devops_enabled: false)
- end
-
- it 'defines source to be unknown' do
- pipeline.set_config_source
-
- expect(pipeline).to be_unknown_source
- end
- end
-
- context 'when pipeline contains all needed data' do
- let(:pipeline) do
- create(:ci_pipeline, project: project,
- sha: '1234',
- ref: 'master',
- source: :push)
- end
-
- context 'when the repository has a config file' do
- before do
- allow(project.repository).to receive(:gitlab_ci_yml_for)
- .and_return('config')
- end
-
- it 'defines source to be from repository' do
- pipeline.set_config_source
-
- expect(pipeline).to be_repository_source
- end
-
- context 'when loading an object' do
- let(:new_pipeline) { Ci::Pipeline.find(pipeline.id) }
-
- it 'does not redefine the source' do
- # force to overwrite the source
- pipeline.unknown_source!
-
- expect(new_pipeline).to be_unknown_source
- end
- end
- end
-
- context 'when the repository does not have a config file' do
- let(:implied_yml) { Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps').content }
-
- context 'auto devops enabled' do
- before do
- allow(project).to receive(:ci_config_path) { 'custom' }
- end
-
- it 'defines source to be auto devops' do
- pipeline.set_config_source
-
- expect(pipeline).to be_auto_devops_source
- end
- end
- end
- end
- end
-
- describe '#ci_yaml_file' do
- let(:implied_yml) { Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps').content }
-
- context 'the source is unknown' do
- before do
- pipeline.unknown_source!
- end
-
- it 'returns the configuration if found' do
- allow(pipeline.project.repository).to receive(:gitlab_ci_yml_for)
- .and_return('config')
-
- expect(pipeline.ci_yaml_file).to be_a(String)
- expect(pipeline.ci_yaml_file).not_to eq(implied_yml)
- expect(pipeline.yaml_errors).to be_nil
- end
-
- it 'sets yaml errors if not found' do
- expect(pipeline.ci_yaml_file).to be_nil
- expect(pipeline.yaml_errors)
- .to start_with('Failed to load CI/CD config file')
- end
- end
-
- context 'the source is the repository' do
- before do
- pipeline.repository_source!
- end
-
- it 'returns the configuration if found' do
- allow(pipeline.project.repository).to receive(:gitlab_ci_yml_for)
- .and_return('config')
-
- expect(pipeline.ci_yaml_file).to be_a(String)
- expect(pipeline.ci_yaml_file).not_to eq(implied_yml)
- expect(pipeline.yaml_errors).to be_nil
- end
- end
-
- context 'when the source is auto_devops_source' do
- before do
- stub_application_setting(auto_devops_enabled: true)
- pipeline.auto_devops_source!
- end
-
- it 'finds the implied config' do
- expect(pipeline.ci_yaml_file).to eq(implied_yml)
- expect(pipeline.yaml_errors).to be_nil
- end
- end
- end
-
describe '#update_status' do
context 'when pipeline is empty' do
it 'updates does not change pipeline status' do
@@ -2675,7 +2385,7 @@ describe Ci::Pipeline, :mailer do
stub_full_request(hook.url, method: :post)
end
- context 'with multiple builds' do
+ context 'with multiple builds', :sidekiq_might_not_need_inline do
context 'when build is queued' do
before do
build_a.enqueue
@@ -2886,24 +2596,19 @@ describe Ci::Pipeline, :mailer do
end
describe '#has_yaml_errors?' do
- context 'when pipeline has errors' do
- let(:pipeline) do
- create(:ci_pipeline, config: { rspec: nil })
+ context 'when yaml_errors is set' do
+ before do
+ pipeline.yaml_errors = 'File not found'
end
- it 'contains yaml errors' do
+ it 'returns true if yaml_errors is set' do
expect(pipeline).to have_yaml_errors
+ expect(pipeline.yaml_errors).to include('File not found')
end
end
- context 'when pipeline does not have errors' do
- let(:pipeline) do
- create(:ci_pipeline, config: { rspec: { script: 'rake test' } })
- end
-
- it 'does not contain yaml errors' do
- expect(pipeline).not_to have_yaml_errors
- end
+ it 'returns false if yaml_errors is not set' do
+ expect(pipeline).not_to have_yaml_errors
end
end
@@ -2930,7 +2635,7 @@ describe Ci::Pipeline, :mailer do
end
shared_examples 'sending a notification' do
- it 'sends an email' do
+ it 'sends an email', :sidekiq_might_not_need_inline do
should_only_email(pipeline.user, kind: :bcc)
end
end
diff --git a/spec/models/clusters/applications/cert_manager_spec.rb b/spec/models/clusters/applications/cert_manager_spec.rb
index c1933c578bc..6b85f9bb127 100644
--- a/spec/models/clusters/applications/cert_manager_spec.rb
+++ b/spec/models/clusters/applications/cert_manager_spec.rb
@@ -54,7 +54,7 @@ describe Clusters::Applications::CertManager do
'kubectl label --overwrite namespace gitlab-managed-apps certmanager.k8s.io/disable-validation=true'
])
expect(subject.postinstall).to eq([
- "for i in $(seq 1 30); do kubectl apply -f /data/helm/certmanager/config/cluster_issuer.yaml && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)"
+ "for i in $(seq 1 90); do kubectl apply -f /data/helm/certmanager/config/cluster_issuer.yaml && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)"
])
end
diff --git a/spec/models/clusters/applications/crossplane_spec.rb b/spec/models/clusters/applications/crossplane_spec.rb
new file mode 100644
index 00000000000..ebc675497f4
--- /dev/null
+++ b/spec/models/clusters/applications/crossplane_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Applications::Crossplane do
+ let(:crossplane) { create(:clusters_applications_crossplane) }
+
+ include_examples 'cluster application core specs', :clusters_applications_crossplane
+ include_examples 'cluster application status specs', :clusters_applications_crossplane
+ include_examples 'cluster application version specs', :clusters_applications_crossplane
+ include_examples 'cluster application initial status specs'
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:stack) }
+ end
+
+ describe '#can_uninstall?' do
+ subject { crossplane.can_uninstall? }
+
+ it { is_expected.to be_truthy }
+ end
+
+ describe '#install_command' do
+ let(:stack) { 'gcp' }
+
+ subject { crossplane.install_command }
+
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand) }
+
+ it 'is initialized with crossplane arguments' do
+ expect(subject.name).to eq('crossplane')
+ expect(subject.chart).to eq('crossplane/crossplane')
+ expect(subject.repository).to eq('https://charts.crossplane.io/alpha')
+ expect(subject.version).to eq('0.4.1')
+ expect(subject).to be_rbac
+ end
+
+ context 'application failed to install previously' do
+ let(:crossplane) { create(:clusters_applications_crossplane, :errored, version: '0.0.1') }
+
+ it 'is initialized with the locked version' do
+ expect(subject.version).to eq('0.4.1')
+ end
+ end
+ end
+
+ describe '#files' do
+ let(:application) { crossplane }
+ let(:values) { subject[:'values.yaml'] }
+
+ subject { application.files }
+
+ it 'includes crossplane specific keys in the values.yaml file' do
+ expect(values).to include('clusterStacks')
+ end
+ end
+end
diff --git a/spec/models/clusters/applications/elastic_stack_spec.rb b/spec/models/clusters/applications/elastic_stack_spec.rb
new file mode 100644
index 00000000000..d0e0dd5ad57
--- /dev/null
+++ b/spec/models/clusters/applications/elastic_stack_spec.rb
@@ -0,0 +1,179 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Applications::ElasticStack do
+ include KubernetesHelpers
+
+ include_examples 'cluster application core specs', :clusters_applications_elastic_stack
+ include_examples 'cluster application status specs', :clusters_applications_elastic_stack
+ include_examples 'cluster application version specs', :clusters_applications_elastic_stack
+ include_examples 'cluster application helm specs', :clusters_applications_elastic_stack
+
+ describe '#can_uninstall?' do
+ let(:ingress) { create(:clusters_applications_ingress, :installed, external_hostname: 'localhost.localdomain') }
+ let(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
+
+ subject { elastic_stack.can_uninstall? }
+
+ it { is_expected.to be_truthy }
+ end
+
+ describe '#set_initial_status' do
+ before do
+ elastic_stack.set_initial_status
+ end
+
+ context 'when ingress is not installed' do
+ let(:cluster) { create(:cluster, :provided_by_gcp) }
+ let(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: cluster) }
+
+ it { expect(elastic_stack).to be_not_installable }
+ end
+
+ context 'when ingress is installed and external_ip is assigned' do
+ let(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') }
+ let(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
+
+ it { expect(elastic_stack).to be_installable }
+ end
+
+ context 'when ingress is installed and external_hostname is assigned' do
+ let(:ingress) { create(:clusters_applications_ingress, :installed, external_hostname: 'localhost.localdomain') }
+ let(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
+
+ it { expect(elastic_stack).to be_installable }
+ end
+ end
+
+ describe '#install_command' do
+ let!(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') }
+ let!(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
+
+ subject { elastic_stack.install_command }
+
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand) }
+
+ it 'is initialized with elastic stack arguments' do
+ expect(subject.name).to eq('elastic-stack')
+ expect(subject.chart).to eq('stable/elastic-stack')
+ expect(subject.version).to eq('1.8.0')
+ expect(subject).to be_rbac
+ expect(subject.files).to eq(elastic_stack.files)
+ end
+
+ context 'on a non rbac enabled cluster' do
+ before do
+ elastic_stack.cluster.platform_kubernetes.abac!
+ end
+
+ it { is_expected.not_to be_rbac }
+ end
+
+ context 'application failed to install previously' do
+ let(:elastic_stack) { create(:clusters_applications_elastic_stack, :errored, version: '0.0.1') }
+
+ it 'is initialized with the locked version' do
+ expect(subject.version).to eq('1.8.0')
+ end
+ end
+ end
+
+ describe '#uninstall_command' do
+ let!(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') }
+ let!(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
+
+ subject { elastic_stack.uninstall_command }
+
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::DeleteCommand) }
+
+ it 'is initialized with elastic stack arguments' do
+ expect(subject.name).to eq('elastic-stack')
+ expect(subject).to be_rbac
+ expect(subject.files).to eq(elastic_stack.files)
+ end
+
+ it 'specifies a post delete command to remove custom resource definitions' do
+ expect(subject.postdelete).to eq([
+ 'kubectl delete pvc --selector release\\=elastic-stack'
+ ])
+ end
+ end
+
+ describe '#files' do
+ let!(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') }
+ let!(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
+
+ let(:values) { subject[:'values.yaml'] }
+
+ subject { elastic_stack.files }
+
+ it 'includes elastic stack specific keys in the values.yaml file' do
+ expect(values).to include('ELASTICSEARCH_HOSTS')
+ end
+ end
+
+ describe '#elasticsearch_client' do
+ context 'cluster is nil' do
+ it 'returns nil' do
+ expect(subject.cluster).to be_nil
+ expect(subject.elasticsearch_client).to be_nil
+ end
+ end
+
+ context "cluster doesn't have kubeclient" do
+ let(:cluster) { create(:cluster) }
+ subject { create(:clusters_applications_elastic_stack, cluster: cluster) }
+
+ it 'returns nil' do
+ expect(subject.elasticsearch_client).to be_nil
+ end
+ end
+
+ context 'cluster has kubeclient' do
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:kubernetes_url) { subject.cluster.platform_kubernetes.api_url }
+ let(:kube_client) { subject.cluster.kubeclient.core_client }
+
+ subject { create(:clusters_applications_elastic_stack, cluster: cluster) }
+
+ before do
+ subject.cluster.platform_kubernetes.namespace = 'a-namespace'
+ stub_kubeclient_discover(cluster.platform_kubernetes.api_url)
+
+ create(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ cluster_project: cluster.cluster_project,
+ project: cluster.cluster_project.project)
+ end
+
+ it 'creates proxy elasticsearch_client' do
+ expect(subject.elasticsearch_client).to be_instance_of(Elasticsearch::Transport::Client)
+ end
+
+ it 'copies proxy_url, options and headers from kube client to elasticsearch_client' do
+ expect(Elasticsearch::Client)
+ .to(receive(:new))
+ .with(url: a_valid_url)
+ .and_call_original
+
+ client = subject.elasticsearch_client
+ faraday_connection = client.transport.connections.first.connection
+
+ expect(faraday_connection.headers["Authorization"]).to eq(kube_client.headers[:Authorization])
+ expect(faraday_connection.ssl.cert_store).to be_instance_of(OpenSSL::X509::Store)
+ expect(faraday_connection.ssl.verify).to eq(1)
+ end
+
+ context 'when cluster is not reachable' do
+ before do
+ allow(kube_client).to receive(:proxy_url).and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil))
+ end
+
+ it 'returns nil' do
+ expect(subject.elasticsearch_client).to be_nil
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index be0c6df7ad6..d7ad7867e1a 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -21,7 +21,7 @@ describe Clusters::Applications::Ingress do
describe '#can_uninstall?' do
subject { ingress.can_uninstall? }
- it 'returns true if application_jupyter_nil_or_installable? AND external_ip_or_hostname? are true' do
+ it 'returns true if external ip is set and no application exists' do
ingress.external_ip = 'IP'
is_expected.to be_truthy
@@ -33,6 +33,12 @@ describe Clusters::Applications::Ingress do
is_expected.to be_falsey
end
+ it 'returns false if application_elastic_stack_nil_or_installable? is false' do
+ create(:clusters_applications_elastic_stack, :installed, cluster: ingress.cluster)
+
+ is_expected.to be_falsey
+ end
+
it 'returns false if external_ip_or_hostname? is false' do
is_expected.to be_falsey
end
@@ -150,6 +156,21 @@ describe Clusters::Applications::Ingress do
it 'includes modsecurity core ruleset enablement' do
expect(subject.values).to include("enable-owasp-modsecurity-crs: 'true'")
end
+
+ it 'includes modsecurity.conf content' do
+ expect(subject.values).to include('modsecurity.conf')
+ # Includes file content from Ingress#modsecurity_config_content
+ expect(subject.values).to include('SecAuditLog')
+
+ expect(subject.values).to include('extraVolumes')
+ expect(subject.values).to include('extraVolumeMounts')
+ end
+
+ it 'includes modsecurity sidecar container' do
+ expect(subject.values).to include('modsecurity-log-volume')
+
+ expect(subject.values).to include('extraContainers')
+ end
end
context 'when ingress_modsecurity is disabled' do
@@ -166,6 +187,21 @@ describe Clusters::Applications::Ingress do
it 'excludes modsecurity core ruleset enablement' do
expect(subject.values).not_to include('enable-owasp-modsecurity-crs')
end
+
+ it 'excludes modsecurity.conf content' do
+ expect(subject.values).not_to include('modsecurity.conf')
+ # Excludes file content from Ingress#modsecurity_config_content
+ expect(subject.values).not_to include('SecAuditLog')
+
+ expect(subject.values).not_to include('extraVolumes')
+ expect(subject.values).not_to include('extraVolumeMounts')
+ end
+
+ it 'excludes modsecurity sidecar container' do
+ expect(subject.values).not_to include('modsecurity-log-volume')
+
+ expect(subject.values).not_to include('extraContainers')
+ end
end
end
end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 48e3b4d6bae..a163229e15a 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -29,6 +29,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to delegate_method(:status).to(:provider) }
it { is_expected.to delegate_method(:status_reason).to(:provider) }
it { is_expected.to delegate_method(:on_creation?).to(:provider) }
+ it { is_expected.to delegate_method(:knative_pre_installed?).to(:provider) }
it { is_expected.to delegate_method(:active?).to(:platform_kubernetes).with_prefix }
it { is_expected.to delegate_method(:rbac?).to(:platform_kubernetes).with_prefix }
it { is_expected.to delegate_method(:available?).to(:application_helm).with_prefix }
@@ -55,7 +56,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
let!(:cluster) { create(:cluster, enabled: true) }
before do
- create(:cluster, enabled: false)
+ create(:cluster, :disabled)
end
it { is_expected.to contain_exactly(cluster) }
@@ -64,7 +65,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
describe '.disabled' do
subject { described_class.disabled }
- let!(:cluster) { create(:cluster, enabled: false) }
+ let!(:cluster) { create(:cluster, :disabled) }
before do
create(:cluster, enabled: true)
@@ -76,10 +77,10 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
describe '.user_provided' do
subject { described_class.user_provided }
- let!(:cluster) { create(:cluster, :provided_by_user) }
+ let!(:cluster) { create(:cluster_platform_kubernetes).cluster }
before do
- create(:cluster, :provided_by_gcp)
+ create(:cluster_provider_gcp, :created)
end
it { is_expected.to contain_exactly(cluster) }
@@ -88,7 +89,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
describe '.gcp_provided' do
subject { described_class.gcp_provided }
- let!(:cluster) { create(:cluster, :provided_by_gcp) }
+ let!(:cluster) { create(:cluster_provider_gcp, :created).cluster }
before do
create(:cluster, :provided_by_user)
@@ -100,7 +101,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
describe '.gcp_installed' do
subject { described_class.gcp_installed }
- let!(:cluster) { create(:cluster, :provided_by_gcp) }
+ let!(:cluster) { create(:cluster_provider_gcp, :created).cluster }
before do
create(:cluster, :providing_by_gcp)
@@ -112,7 +113,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
describe '.aws_provided' do
subject { described_class.aws_provided }
- let!(:cluster) { create(:cluster, :provided_by_aws) }
+ let!(:cluster) { create(:cluster_provider_aws, :created).cluster }
before do
create(:cluster, :provided_by_user)
@@ -124,11 +125,11 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
describe '.aws_installed' do
subject { described_class.aws_installed }
- let!(:cluster) { create(:cluster, :provided_by_aws) }
+ let!(:cluster) { create(:cluster_provider_aws, :created).cluster }
before do
- errored_cluster = create(:cluster, :provided_by_aws)
- errored_cluster.provider.make_errored!("Error message")
+ errored_provider = create(:cluster_provider_aws)
+ errored_provider.make_errored!("Error message")
end
it { is_expected.to contain_exactly(cluster) }
@@ -152,6 +153,16 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
end
+ describe '.for_project_namespace' do
+ subject { described_class.for_project_namespace(namespace_id) }
+
+ let!(:cluster) { create(:cluster, :project) }
+ let!(:another_cluster) { create(:cluster, :project) }
+ let(:namespace_id) { cluster.first_project.namespace_id }
+
+ it { is_expected.to contain_exactly(cluster) }
+ end
+
describe 'validations' do
subject { cluster.valid? }
@@ -504,13 +515,15 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
let!(:helm) { create(:clusters_applications_helm, cluster: cluster) }
let!(:ingress) { create(:clusters_applications_ingress, cluster: cluster) }
let!(:cert_manager) { create(:clusters_applications_cert_manager, cluster: cluster) }
+ let!(:crossplane) { create(:clusters_applications_crossplane, cluster: cluster) }
let!(:prometheus) { create(:clusters_applications_prometheus, cluster: cluster) }
let!(:runner) { create(:clusters_applications_runner, cluster: cluster) }
let!(:jupyter) { create(:clusters_applications_jupyter, cluster: cluster) }
let!(:knative) { create(:clusters_applications_knative, cluster: cluster) }
+ let!(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: cluster) }
it 'returns a list of created applications' do
- is_expected.to contain_exactly(helm, ingress, cert_manager, prometheus, runner, jupyter, knative)
+ is_expected.to contain_exactly(helm, ingress, cert_manager, crossplane, prometheus, runner, jupyter, knative, elastic_stack)
end
end
end
@@ -675,12 +688,36 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
context 'the cluster has a provider' do
let(:cluster) { create(:cluster, :provided_by_gcp) }
+ let(:provider_status) { :errored }
before do
cluster.provider.make_errored!
end
- it { is_expected.to eq :errored }
+ it { is_expected.to eq provider_status }
+
+ context 'when cluster cleanup is ongoing' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status_name, :cleanup_status) do
+ provider_status | :cleanup_not_started
+ :cleanup_ongoing | :cleanup_uninstalling_applications
+ :cleanup_ongoing | :cleanup_removing_project_namespaces
+ :cleanup_ongoing | :cleanup_removing_service_account
+ :cleanup_errored | :cleanup_errored
+ end
+
+ with_them do
+ it 'returns cleanup_ongoing when uninstalling applications' do
+ cluster.cleanup_status = described_class
+ .state_machines[:cleanup_status]
+ .states[cleanup_status]
+ .value
+
+ is_expected.to eq status_name
+ end
+ end
+ end
end
context 'there is a cached connection status' do
@@ -704,6 +741,83 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
end
+ describe 'cleanup_status state_machine' do
+ shared_examples 'cleanup_status transition' do
+ let(:cluster) { create(:cluster, from_state) }
+
+ it 'transitions cleanup_status correctly' do
+ expect { subject }.to change { cluster.cleanup_status_name }
+ .from(from_state).to(to_state)
+ end
+
+ it 'schedules a Clusters::Cleanup::*Worker' do
+ expect(expected_worker_class).to receive(:perform_async).with(cluster.id)
+ subject
+ end
+ end
+
+ describe '#start_cleanup!' do
+ let(:expected_worker_class) { Clusters::Cleanup::AppWorker }
+ let(:to_state) { :cleanup_uninstalling_applications }
+
+ subject { cluster.start_cleanup! }
+
+ context 'when cleanup_status is cleanup_not_started' do
+ let(:from_state) { :cleanup_not_started }
+
+ it_behaves_like 'cleanup_status transition'
+ end
+
+ context 'when cleanup_status is errored' do
+ let(:from_state) { :cleanup_errored }
+
+ it_behaves_like 'cleanup_status transition'
+ end
+ end
+
+ describe '#make_cleanup_errored!' do
+ NON_ERRORED_STATES = Clusters::Cluster.state_machines[:cleanup_status].states.keys - [:cleanup_errored]
+
+ NON_ERRORED_STATES.each do |state|
+ it "transitions cleanup_status from #{state} to cleanup_errored" do
+ cluster = create(:cluster, state)
+
+ expect { cluster.make_cleanup_errored! }.to change { cluster.cleanup_status_name }
+ .from(state).to(:cleanup_errored)
+ end
+
+ it "sets error message" do
+ cluster = create(:cluster, state)
+
+ expect { cluster.make_cleanup_errored!("Error Message") }.to change { cluster.cleanup_status_reason }
+ .from(nil).to("Error Message")
+ end
+ end
+ end
+
+ describe '#continue_cleanup!' do
+ context 'when cleanup_status is cleanup_uninstalling_applications' do
+ let(:expected_worker_class) { Clusters::Cleanup::ProjectNamespaceWorker }
+ let(:from_state) { :cleanup_uninstalling_applications }
+ let(:to_state) { :cleanup_removing_project_namespaces }
+
+ subject { cluster.continue_cleanup! }
+
+ it_behaves_like 'cleanup_status transition'
+ end
+
+ context 'when cleanup_status is cleanup_removing_project_namespaces' do
+ let(:expected_worker_class) { Clusters::Cleanup::ServiceAccountWorker }
+ let(:from_state) { :cleanup_removing_project_namespaces }
+ let(:to_state) { :cleanup_removing_service_account }
+
+ subject { cluster.continue_cleanup! }
+
+ it_behaves_like 'cleanup_status transition'
+ end
+ end
+ end
+
describe '#connection_status' do
let(:cluster) { create(:cluster) }
let(:status) { :connected }
@@ -804,26 +918,4 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
end
end
-
- describe '#knative_pre_installed?' do
- subject { cluster.knative_pre_installed? }
-
- context 'with a GCP provider without cloud_run' do
- let(:cluster) { create(:cluster, :provided_by_gcp) }
-
- it { is_expected.to be_falsey }
- end
-
- context 'with a GCP provider with cloud_run' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :cloud_run_enabled) }
-
- it { is_expected.to be_truthy }
- end
-
- context 'with a user provider' do
- let(:cluster) { create(:cluster, :provided_by_user) }
-
- it { is_expected.to be_falsey }
- end
- end
end
diff --git a/spec/models/clusters/clusters_hierarchy_spec.rb b/spec/models/clusters/clusters_hierarchy_spec.rb
index fc35b8257e9..1957e1fc5ee 100644
--- a/spec/models/clusters/clusters_hierarchy_spec.rb
+++ b/spec/models/clusters/clusters_hierarchy_spec.rb
@@ -42,6 +42,28 @@ describe Clusters::ClustersHierarchy do
it 'returns clusters for project' do
expect(base_and_ancestors(cluster.project)).to eq([cluster])
end
+
+ context 'cluster has management project' do
+ let(:management_project) { create(:project, namespace: cluster.first_project.namespace) }
+
+ before do
+ cluster.update!(management_project: management_project)
+ end
+
+ context 'management_project is in same namespace as cluster' do
+ it 'returns cluster for management_project' do
+ expect(base_and_ancestors(management_project)).to eq([cluster])
+ end
+ end
+
+ context 'management_project is in a different namespace from cluster' do
+ let(:management_project) { create(:project) }
+
+ it 'returns nothing' do
+ expect(base_and_ancestors(management_project)).to be_empty
+ end
+ end
+ end
end
context 'cluster has management project' do
@@ -50,16 +72,12 @@ describe Clusters::ClustersHierarchy do
let(:group) { create(:group) }
let(:project) { create(:project, group: group) }
- let(:management_project) { create(:project) }
+ let(:management_project) { create(:project, group: group) }
it 'returns clusters for management_project' do
expect(base_and_ancestors(management_project)).to eq([group_cluster])
end
- it 'returns nothing if include_management_project is false' do
- expect(base_and_ancestors(management_project, include_management_project: false)).to be_empty
- end
-
it 'returns clusters for project' do
expect(base_and_ancestors(project)).to eq([project_cluster, group_cluster])
end
@@ -70,17 +88,21 @@ describe Clusters::ClustersHierarchy do
end
context 'project in nested group with clusters at some levels' do
- let!(:child) { create(:cluster, :group, groups: [child_group], management_project: management_project) }
- let!(:ancestor) { create(:cluster, :group, groups: [ancestor_group]) }
+ let!(:child) { create(:cluster, :group, groups: [child_group]) }
+ let!(:ancestor) { create(:cluster, :group, groups: [ancestor_group], management_project: management_project) }
let(:ancestor_group) { create(:group) }
let(:parent_group) { create(:group, parent: ancestor_group) }
let(:child_group) { create(:group, parent: parent_group) }
let(:project) { create(:project, group: child_group) }
- let(:management_project) { create(:project) }
+ let(:management_project) { create(:project, group: child_group) }
+
+ it 'returns clusters for management_project' do
+ expect(base_and_ancestors(management_project)).to eq([ancestor, child])
+ end
 it 'returns clusters for management_project when not including management project' do
- expect(base_and_ancestors(management_project)).to eq([child])
+ expect(base_and_ancestors(management_project, include_management_project: false)).to eq([child, ancestor])
end
it 'returns clusters for project' do
diff --git a/spec/models/clusters/providers/aws_spec.rb b/spec/models/clusters/providers/aws_spec.rb
index ec8159a7ee0..05d6e63288e 100644
--- a/spec/models/clusters/providers/aws_spec.rb
+++ b/spec/models/clusters/providers/aws_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
describe Clusters::Providers::Aws do
it { is_expected.to belong_to(:cluster) }
- it { is_expected.to belong_to(:created_by_user) }
it { is_expected.to validate_length_of(:key_name).is_at_least(1).is_at_most(255) }
it { is_expected.to validate_length_of(:region).is_at_least(1).is_at_most(255) }
@@ -64,13 +63,72 @@ describe Clusters::Providers::Aws do
before do
expect(provider.access_key_id).to be_present
expect(provider.secret_access_key).to be_present
+ expect(provider.session_token).to be_present
end
- it 'removes access_key_id and secret_access_key' do
+ it 'removes access_key_id, secret_access_key and session_token' do
subject
expect(provider.access_key_id).to be_nil
expect(provider.secret_access_key).to be_nil
+ expect(provider.session_token).to be_nil
end
end
+
+ describe '#api_client' do
+ let(:provider) { create(:cluster_provider_aws) }
+ let(:credentials) { double }
+ let(:client) { double }
+
+ subject { provider.api_client }
+
+ before do
+ allow(provider).to receive(:credentials).and_return(credentials)
+
+ expect(Aws::CloudFormation::Client).to receive(:new)
+ .with(credentials: credentials, region: provider.region)
+ .and_return(client)
+ end
+
+ it { is_expected.to eq client }
+ end
+
+ describe '#credentials' do
+ let(:provider) { create(:cluster_provider_aws) }
+ let(:credentials) { double }
+
+ subject { provider.credentials }
+
+ before do
+ expect(Aws::Credentials).to receive(:new)
+ .with(provider.access_key_id, provider.secret_access_key, provider.session_token)
+ .and_return(credentials)
+ end
+
+ it { is_expected.to eq credentials }
+ end
+
+ describe '#created_by_user' do
+ let(:provider) { create(:cluster_provider_aws) }
+
+ subject { provider.created_by_user }
+
+ it { is_expected.to eq provider.cluster.user }
+ end
+
+ describe '#has_rbac_enabled?' do
+ let(:provider) { create(:cluster_provider_aws) }
+
+ subject { provider.has_rbac_enabled? }
+
+ it { is_expected.to be_truthy }
+ end
+
+ describe '#knative_pre_installed?' do
+ let(:provider) { create(:cluster_provider_aws) }
+
+ subject { provider.knative_pre_installed? }
+
+ it { is_expected.to be_falsey }
+ end
end
diff --git a/spec/models/clusters/providers/gcp_spec.rb b/spec/models/clusters/providers/gcp_spec.rb
index 15e152519b4..e2fd777d131 100644
--- a/spec/models/clusters/providers/gcp_spec.rb
+++ b/spec/models/clusters/providers/gcp_spec.rb
@@ -78,12 +78,20 @@ describe Clusters::Providers::Gcp do
end
end
- describe '#legacy_abac?' do
- let(:gcp) { build(:cluster_provider_gcp) }
+ describe '#has_rbac_enabled?' do
+ subject { gcp.has_rbac_enabled? }
+
+ context 'when cluster is legacy_abac' do
+ let(:gcp) { create(:cluster_provider_gcp, :abac_enabled) }
+
+ it { is_expected.to be_falsey }
+ end
- subject { gcp }
+ context 'when cluster is not legacy_abac' do
+ let(:gcp) { create(:cluster_provider_gcp) }
- it { is_expected.not_to be_legacy_abac }
+ it { is_expected.to be_truthy }
+ end
end
describe '#knative_pre_installed?' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 95e9b0d0f92..1e1b679a32c 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -522,7 +522,7 @@ describe CommitStatus do
let(:stage) { Ci::Stage.first }
- it 'creates a new stage' do
+ it 'creates a new stage', :sidekiq_might_not_need_inline do
expect { commit_status }.to change { Ci::Stage.count }.by(1)
expect(stage.name).to eq 'test'
@@ -548,7 +548,7 @@ describe CommitStatus do
status: :success)
end
- it 'uses existing stage' do
+ it 'uses existing stage', :sidekiq_might_not_need_inline do
expect { commit_status }.not_to change { Ci::Stage.count }
expect(commit_status.stage_id).to eq stage.id
diff --git a/spec/models/concerns/deployment_platform_spec.rb b/spec/models/concerns/deployment_platform_spec.rb
index f99bf18768f..9164c3a75c5 100644
--- a/spec/models/concerns/deployment_platform_spec.rb
+++ b/spec/models/concerns/deployment_platform_spec.rb
@@ -13,7 +13,11 @@ describe DeploymentPlatform do
end
context 'when project is the cluster\'s management project ' do
- let!(:cluster_with_management_project) { create(:cluster, :provided_by_user, management_project: project) }
+ let(:another_project) { create(:project, namespace: project.namespace) }
+
+ let!(:cluster_with_management_project) do
+ create(:cluster, :provided_by_user, projects: [another_project], management_project: project)
+ end
context 'cluster_management_project feature is enabled' do
it 'returns the cluster with management project' do
@@ -66,7 +70,11 @@ describe DeploymentPlatform do
end
context 'when project is the cluster\'s management project ' do
- let!(:cluster_with_management_project) { create(:cluster, :provided_by_user, management_project: project) }
+ let(:another_project) { create(:project, namespace: project.namespace) }
+
+ let!(:cluster_with_management_project) do
+ create(:cluster, :provided_by_user, projects: [another_project], management_project: project)
+ end
context 'cluster_management_project feature is enabled' do
it 'returns the cluster with management project' do
@@ -130,5 +138,13 @@ describe DeploymentPlatform do
end
end
end
+
+ context 'when instance has configured kubernetes cluster' do
+ let!(:instance_cluster) { create(:cluster, :provided_by_user, :instance) }
+
+ it 'returns the Kubernetes platform' do
+ is_expected.to eq(instance_cluster.platform_kubernetes)
+ end
+ end
end
end
diff --git a/spec/models/concerns/from_union_spec.rb b/spec/models/concerns/from_union_spec.rb
index ee427a667c6..735e14b47ec 100644
--- a/spec/models/concerns/from_union_spec.rb
+++ b/spec/models/concerns/from_union_spec.rb
@@ -15,7 +15,7 @@ describe FromUnion do
it 'selects from the results of the UNION' do
query = model.from_union([model.where(id: 1), model.where(id: 2)])
- expect(query.to_sql).to match(/FROM \(SELECT.+UNION.+SELECT.+\) users/m)
+ expect(query.to_sql).to match(/FROM \(\(SELECT.+\)\nUNION\n\(SELECT.+\)\) users/m)
end
it 'supports the use of a custom alias for the sub query' do
@@ -24,7 +24,7 @@ describe FromUnion do
alias_as: 'kittens'
)
- expect(query.to_sql).to match(/FROM \(SELECT.+UNION.+SELECT.+\) kittens/m)
+ expect(query.to_sql).to match(/FROM \(\(SELECT.+\)\nUNION\n\(SELECT.+\)\) kittens/m)
end
it 'supports keeping duplicate rows' do
@@ -34,7 +34,7 @@ describe FromUnion do
)
expect(query.to_sql)
- .to match(/FROM \(SELECT.+UNION ALL.+SELECT.+\) users/m)
+ .to match(/FROM \(\(SELECT.+\)\nUNION ALL\n\(SELECT.+\)\) users/m)
end
end
end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index e8116f0a301..f7bef9e71e2 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -111,6 +111,34 @@ describe Issuable do
end
end
+ describe '.initialize' do
+ it 'maps the state to the right state_id' do
+ described_class::STATE_ID_MAP.each do |key, value|
+ issuable = MergeRequest.new(state: key)
+
+ expect(issuable.state).to eq(key)
+ expect(issuable.state_id).to eq(value)
+ end
+ end
+
+ it 'maps a string version of the state to the right state_id' do
+ described_class::STATE_ID_MAP.each do |key, value|
+ issuable = MergeRequest.new('state' => key)
+
+ expect(issuable.state).to eq(key)
+ expect(issuable.state_id).to eq(value)
+ end
+ end
+
+ it 'gives preference to state_id if present' do
+ issuable = MergeRequest.new('state' => 'opened',
+ 'state_id' => described_class::STATE_ID_MAP['merged'])
+
+ expect(issuable.state).to eq('merged')
+ expect(issuable.state_id).to eq(described_class::STATE_ID_MAP['merged'])
+ end
+ end
+
describe '#milestone_available?' do
let(:group) { create(:group) }
let(:project) { create(:project, group: group) }
diff --git a/spec/models/concerns/noteable_spec.rb b/spec/models/concerns/noteable_spec.rb
index f823ac0165f..e8991a3a015 100644
--- a/spec/models/concerns/noteable_spec.rb
+++ b/spec/models/concerns/noteable_spec.rb
@@ -177,50 +177,6 @@ describe Noteable do
end
end
- describe "#discussions_to_be_resolved?" do
- context "when discussions are not resolvable" do
- before do
- allow(subject).to receive(:discussions_resolvable?).and_return(false)
- end
-
- it "returns false" do
- expect(subject.discussions_to_be_resolved?).to be false
- end
- end
-
- context "when discussions are resolvable" do
- before do
- allow(subject).to receive(:discussions_resolvable?).and_return(true)
-
- allow(first_discussion).to receive(:resolvable?).and_return(true)
- allow(second_discussion).to receive(:resolvable?).and_return(false)
- allow(third_discussion).to receive(:resolvable?).and_return(true)
- end
-
- context "when all resolvable discussions are resolved" do
- before do
- allow(first_discussion).to receive(:resolved?).and_return(true)
- allow(third_discussion).to receive(:resolved?).and_return(true)
- end
-
- it "returns false" do
- expect(subject.discussions_to_be_resolved?).to be false
- end
- end
-
- context "when some resolvable discussions are not resolved" do
- before do
- allow(first_discussion).to receive(:resolved?).and_return(true)
- allow(third_discussion).to receive(:resolved?).and_return(false)
- end
-
- it "returns true" do
- expect(subject.discussions_to_be_resolved?).to be true
- end
- end
- end
- end
-
describe "#discussions_to_be_resolved" do
before do
allow(first_discussion).to receive(:to_be_resolved?).and_return(true)
diff --git a/spec/models/concerns/redactable_spec.rb b/spec/models/concerns/redactable_spec.rb
index 57c7d2cb767..3f6a2e2410c 100644
--- a/spec/models/concerns/redactable_spec.rb
+++ b/spec/models/concerns/redactable_spec.rb
@@ -7,44 +7,6 @@ describe Redactable do
stub_commonmark_sourcepos_disabled
end
- shared_examples 'model with redactable field' do
- it 'redacts unsubscribe token' do
- model[field] = 'some text /sent_notifications/00000000000000000000000000000000/unsubscribe more text'
-
- model.save!
-
- expect(model[field]).to eq 'some text /sent_notifications/REDACTED/unsubscribe more text'
- end
-
- it 'ignores not hexadecimal tokens' do
- text = 'some text /sent_notifications/token/unsubscribe more text'
- model[field] = text
-
- model.save!
-
- expect(model[field]).to eq text
- end
-
- it 'ignores not matching texts' do
- text = 'some text /sent_notifications/.*/unsubscribe more text'
- model[field] = text
-
- model.save!
-
- expect(model[field]).to eq text
- end
-
- it 'redacts the field when saving the model before creating markdown cache' do
- model[field] = 'some text /sent_notifications/00000000000000000000000000000000/unsubscribe more text'
-
- model.save!
-
- expected = 'some text /sent_notifications/REDACTED/unsubscribe more text'
- expect(model[field]).to eq expected
- expect(model["#{field}_html"]).to eq "<p dir=\"auto\">#{expected}</p>"
- end
- end
-
context 'when model is an issue' do
it_behaves_like 'model with redactable field' do
let(:model) { create(:issue) }
diff --git a/spec/models/concerns/subscribable_spec.rb b/spec/models/concerns/subscribable_spec.rb
index 2f88adf08dd..f189cd7633c 100644
--- a/spec/models/concerns/subscribable_spec.rb
+++ b/spec/models/concerns/subscribable_spec.rb
@@ -133,4 +133,60 @@ describe Subscribable, 'Subscribable' do
end
end
end
+
+ describe '#set_subscription' do
+ shared_examples 'setting subscriptions' do
+ context 'when desired_state is set to true' do
+ context 'when a user is subscribed to the resource' do
+ it 'keeps the user subscribed' do
+ resource.subscriptions.create(user: user_1, subscribed: true, project: resource_project)
+
+ resource.set_subscription(user_1, true, resource_project)
+
+ expect(resource.subscribed?(user_1, resource_project)).to be_truthy
+ end
+ end
+
+ context 'when a user is not subscribed to the resource' do
+ it 'subscribes the user to the resource' do
+ expect { resource.set_subscription(user_1, true, resource_project) }
+ .to change { resource.subscribed?(user_1, resource_project) }
+ .from(false).to(true)
+ end
+ end
+ end
+
+ context 'when desired_state is set to false' do
+ context 'when a user is subscribed to the resource' do
+ it 'unsubscribes the user from the resource' do
+ resource.subscriptions.create(user: user_1, subscribed: true, project: resource_project)
+
+ expect { resource.set_subscription(user_1, false, resource_project) }
+ .to change { resource.subscribed?(user_1, resource_project) }
+ .from(true).to(false)
+ end
+ end
+
+ context 'when a user is not subscribed to the resource' do
+ it 'keeps the user unsubscribed' do
+ resource.set_subscription(user_1, false, resource_project)
+
+ expect(resource.subscribed?(user_1, resource_project)).to be_falsey
+ end
+ end
+ end
+ end
+
+ context 'without project' do
+ let(:resource_project) { nil }
+
+ it_behaves_like 'setting subscriptions'
+ end
+
+ context 'with project' do
+ let(:resource_project) { project }
+
+ it_behaves_like 'setting subscriptions'
+ end
+ end
end
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index eea539746a5..0a3065140bf 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -235,4 +235,36 @@ describe ContainerRepository do
expect(repository).not_to be_persisted
end
end
+
+ describe '.for_group_and_its_subgroups' do
+ subject { described_class.for_group_and_its_subgroups(test_group) }
+
+ context 'in a group' do
+ let(:test_group) { group }
+
+ it { is_expected.to contain_exactly(repository) }
+ end
+
+ context 'with a subgroup' do
+ let(:test_group) { create(:group) }
+ let(:another_project) { create(:project, path: 'test', group: test_group) }
+
+ let(:another_repository) do
+ create(:container_repository, name: 'my_image', project: another_project)
+ end
+
+ before do
+ group.parent = test_group
+ group.save
+ end
+
+ it { is_expected.to contain_exactly(repository, another_repository) }
+ end
+
+ context 'group without container_repositories' do
+ let(:test_group) { create(:group) }
+
+ it { is_expected.to eq([]) }
+ end
+ end
end
diff --git a/spec/models/deployment_merge_request_spec.rb b/spec/models/deployment_merge_request_spec.rb
new file mode 100644
index 00000000000..fd5be52d47c
--- /dev/null
+++ b/spec/models/deployment_merge_request_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe DeploymentMergeRequest do
+ let(:mr) { create(:merge_request, :merged) }
+ let(:deployment) { create(:deployment, :success, project: project) }
+ let(:project) { mr.project }
+
+ subject { described_class.new(deployment: deployment, merge_request: mr) }
+
+ it { is_expected.to belong_to(:deployment).required }
+ it { is_expected.to belong_to(:merge_request).required }
+end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 3a0b3c46ad0..52c19d4814c 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -10,6 +10,8 @@ describe Deployment do
it { is_expected.to belong_to(:cluster).class_name('Clusters::Cluster') }
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:deployable) }
+ it { is_expected.to have_many(:deployment_merge_requests) }
+ it { is_expected.to have_many(:merge_requests).through(:deployment_merge_requests) }
it { is_expected.to delegate_method(:name).to(:environment).with_prefix }
it { is_expected.to delegate_method(:commit).to(:project) }
@@ -361,4 +363,82 @@ describe Deployment do
.to raise_error(ActiveRecord::RecordNotFound)
end
end
+
+ describe '#previous_deployment' do
+ it 'returns the previous deployment' do
+ deploy1 = create(:deployment)
+ deploy2 = create(
+ :deployment,
+ project: deploy1.project,
+ environment: deploy1.environment
+ )
+
+ expect(deploy2.previous_deployment).to eq(deploy1)
+ end
+ end
+
+ describe '#link_merge_requests' do
+ it 'links merge requests with a deployment' do
+ deploy = create(:deployment)
+ mr1 = create(
+ :merge_request,
+ :merged,
+ target_project: deploy.project,
+ source_project: deploy.project
+ )
+
+ mr2 = create(
+ :merge_request,
+ :merged,
+ target_project: deploy.project,
+ source_project: deploy.project
+ )
+
+ deploy.link_merge_requests(deploy.project.merge_requests)
+
+ expect(deploy.merge_requests).to include(mr1, mr2)
+ end
+ end
+
+ describe '#previous_environment_deployment' do
+ it 'returns the previous deployment of the same environment' do
+ deploy1 = create(:deployment, :success, ref: 'v1.0.0')
+ deploy2 = create(
+ :deployment,
+ :success,
+ project: deploy1.project,
+ environment: deploy1.environment,
+ ref: 'v1.0.1'
+ )
+
+ expect(deploy2.previous_environment_deployment).to eq(deploy1)
+ end
+
+ it 'ignores deployments that were not successful' do
+ deploy1 = create(:deployment, :failed, ref: 'v1.0.0')
+ deploy2 = create(
+ :deployment,
+ :success,
+ project: deploy1.project,
+ environment: deploy1.environment,
+ ref: 'v1.0.1'
+ )
+
+ expect(deploy2.previous_environment_deployment).to be_nil
+ end
+
+ it 'ignores deployments for different environments' do
+ deploy1 = create(:deployment, :success, ref: 'v1.0.0')
+ preprod = create(:environment, project: deploy1.project, name: 'preprod')
+ deploy2 = create(
+ :deployment,
+ :success,
+ project: deploy1.project,
+ environment: preprod,
+ ref: 'v1.0.1'
+ )
+
+ expect(deploy2.previous_environment_deployment).to be_nil
+ end
+ end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 786f3b832c4..47e39e5fbe5 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
describe Environment, :use_clean_rails_memory_store_caching do
include ReactiveCachingHelpers
using RSpec::Parameterized::TableSyntax
+ include RepoHelpers
let(:project) { create(:project, :stubbed_repository) }
subject(:environment) { create(:environment, project: project) }
@@ -259,7 +260,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
let(:head_commit) { project.commit }
let(:commit) { project.commit.parent }
- it 'returns deployment id for the environment' do
+ it 'returns deployment id for the environment', :sidekiq_might_not_need_inline do
expect(environment.first_deployment_for(commit.id)).to eq deployment1
end
@@ -267,7 +268,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
expect(environment.first_deployment_for(head_commit.id)).to eq nil
end
- it 'returns a UTF-8 ref' do
+ it 'returns a UTF-8 ref', :sidekiq_might_not_need_inline do
expect(environment.first_deployment_for(commit.id).ref).to be_utf8
end
end
@@ -505,6 +506,14 @@ describe Environment, :use_clean_rails_memory_store_caching do
end
end
+ context 'when there is a deployment record with failed status' do
+ let!(:deployment) { create(:deployment, :failed, environment: environment) }
+
+ it 'returns the previous deployment' do
+ is_expected.to eq(previous_deployment)
+ end
+ end
+
context 'when there is a deployment record with success status' do
let!(:deployment) { create(:deployment, :success, environment: environment) }
@@ -515,6 +524,131 @@ describe Environment, :use_clean_rails_memory_store_caching do
end
end
+ describe '#last_visible_deployment' do
+ subject { environment.last_visible_deployment }
+
+ before do
+ allow_any_instance_of(Deployment).to receive(:create_ref)
+ end
+
+ context 'when there is an old deployment record' do
+ let!(:previous_deployment) { create(:deployment, :success, environment: environment) }
+
+ context 'when there is a deployment record with created status' do
+ let!(:deployment) { create(:deployment, environment: environment) }
+
+ it { is_expected.to eq(previous_deployment) }
+ end
+
+ context 'when there is a deployment record with running status' do
+ let!(:deployment) { create(:deployment, :running, environment: environment) }
+
+ it { is_expected.to eq(deployment) }
+ end
+
+ context 'when there is a deployment record with success status' do
+ let!(:deployment) { create(:deployment, :success, environment: environment) }
+
+ it { is_expected.to eq(deployment) }
+ end
+
+ context 'when there is a deployment record with failed status' do
+ let!(:deployment) { create(:deployment, :failed, environment: environment) }
+
+ it { is_expected.to eq(deployment) }
+ end
+
+ context 'when there is a deployment record with canceled status' do
+ let!(:deployment) { create(:deployment, :canceled, environment: environment) }
+
+ it { is_expected.to eq(deployment) }
+ end
+ end
+ end
+
+ describe '#last_visible_pipeline' do
+ let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let(:environment) { create(:environment, project: project) }
+ let(:commit) { project.commit }
+
+ let(:success_pipeline) do
+ create(:ci_pipeline, :success, project: project, user: user, sha: commit.sha)
+ end
+
+ let(:failed_pipeline) do
+ create(:ci_pipeline, :failed, project: project, user: user, sha: commit.sha)
+ end
+
+ it 'uses the last deployment even if it failed' do
+ pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
+ ci_build = create(:ci_build, project: project, pipeline: pipeline)
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build, sha: commit.sha)
+
+ last_pipeline = environment.last_visible_pipeline
+
+ expect(last_pipeline).to eq(pipeline)
+ end
+
+ it 'returns nil if there is no deployment' do
+ create(:ci_build, project: project, pipeline: success_pipeline)
+
+ expect(environment.last_visible_pipeline).to be_nil
+ end
+
+ it 'does not return an invisible pipeline' do
+ failed_pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
+ ci_build_a = create(:ci_build, project: project, pipeline: failed_pipeline)
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_a, sha: commit.sha)
+ pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
+ ci_build_b = create(:ci_build, project: project, pipeline: pipeline)
+ create(:deployment, :created, project: project, environment: environment, deployable: ci_build_b, sha: commit.sha)
+
+ last_pipeline = environment.last_visible_pipeline
+
+ expect(last_pipeline).to eq(failed_pipeline)
+ end
+
+ context 'for the environment' do
+ it 'returns the last pipeline' do
+ pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
+ ci_build = create(:ci_build, project: project, pipeline: pipeline)
+ create(:deployment, :success, project: project, environment: environment, deployable: ci_build, sha: commit.sha)
+
+ last_pipeline = environment.last_visible_pipeline
+
+ expect(last_pipeline).to eq(pipeline)
+ end
+
+ context 'with multiple deployments' do
+ it 'returns the last pipeline' do
+ pipeline_a = create(:ci_pipeline, project: project, user: user)
+ pipeline_b = create(:ci_pipeline, project: project, user: user)
+ ci_build_a = create(:ci_build, project: project, pipeline: pipeline_a)
+ ci_build_b = create(:ci_build, project: project, pipeline: pipeline_b)
+ create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
+ create(:deployment, :success, project: project, environment: environment, deployable: ci_build_b)
+
+ last_pipeline = environment.last_visible_pipeline
+
+ expect(last_pipeline).to eq(pipeline_b)
+ end
+ end
+
+ context 'with multiple pipelines' do
+ it 'returns the last pipeline' do
+ create(:ci_build, project: project, pipeline: success_pipeline)
+ ci_build_b = create(:ci_build, project: project, pipeline: failed_pipeline)
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b, sha: commit.sha)
+
+ last_pipeline = environment.last_visible_pipeline
+
+ expect(last_pipeline).to eq(failed_pipeline)
+ end
+ end
+ end
+ end
+
describe '#has_terminals?' do
subject { environment.has_terminals? }
@@ -610,6 +744,12 @@ describe Environment, :use_clean_rails_memory_store_caching do
allow(environment).to receive(:deployment_platform).and_return(double)
end
+ context 'reactive cache configuration' do
+ it 'does not continue to spawn jobs' do
+ expect(described_class.reactive_cache_lifetime).to be < described_class.reactive_cache_refresh_interval
+ end
+ end
+
context 'reactive cache is empty' do
before do
stub_reactive_cache(environment, nil)
@@ -727,6 +867,51 @@ describe Environment, :use_clean_rails_memory_store_caching do
end
end
+ describe '#prometheus_status' do
+ context 'when a cluster is present' do
+ context 'when a deployment platform is present' do
+ let(:cluster) { create(:cluster, :provided_by_user, :project) }
+ let(:environment) { create(:environment, project: cluster.project) }
+
+ subject { environment.prometheus_status }
+
+ context 'when the prometheus application status is :updating' do
+ let!(:prometheus) { create(:clusters_applications_prometheus, :updating, cluster: cluster) }
+
+ it { is_expected.to eq(:updating) }
+ end
+
+ context 'when the prometheus application state is :updated' do
+ let!(:prometheus) { create(:clusters_applications_prometheus, :updated, cluster: cluster) }
+
+ it { is_expected.to eq(:updated) }
+ end
+
+ context 'when the prometheus application is not installed' do
+ it { is_expected.to be_nil }
+ end
+ end
+
+ context 'when a deployment platform is not present' do
+ let(:cluster) { create(:cluster, :project) }
+ let(:environment) { create(:environment, project: cluster.project) }
+
+ subject { environment.prometheus_status }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ context 'when a cluster is not present' do
+ let(:project) { create(:project, :stubbed_repository) }
+ let(:environment) { create(:environment, project: project) }
+
+ subject { environment.prometheus_status }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
describe '#additional_metrics' do
let(:project) { create(:prometheus_project) }
let(:metric_params) { [] }
diff --git a/spec/models/environment_status_spec.rb b/spec/models/environment_status_spec.rb
index 01d331f518b..eea81d7c128 100644
--- a/spec/models/environment_status_spec.rb
+++ b/spec/models/environment_status_spec.rb
@@ -113,7 +113,7 @@ describe EnvironmentStatus do
head_pipeline: pipeline)
end
- it 'returns environment status' do
+ it 'returns environment status', :sidekiq_might_not_need_inline do
expect(subject.count).to eq(1)
expect(subject[0].environment).to eq(environment)
expect(subject[0].merge_request).to eq(merge_request)
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index 21e381d9fb7..dbd3f8ffab3 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -208,6 +208,28 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
expect(sentry_client).to have_received(:list_issues)
end
end
+
+ context 'when sentry client raises Sentry::Client::ResponseInvalidSizeError' do
+ let(:sentry_client) { spy(:sentry_client) }
+ let(:error_msg) {"Sentry API response is too big. Limit is #{Gitlab::Utils::DeepSize.human_default_max_size}."}
+
+ before do
+ synchronous_reactive_cache(subject)
+
+ allow(subject).to receive(:sentry_client).and_return(sentry_client)
+ allow(sentry_client).to receive(:list_issues).with(opts)
+ .and_raise(Sentry::Client::ResponseInvalidSizeError, error_msg)
+ end
+
+ it 'returns error' do
+ expect(result).to eq(
+ error: error_msg,
+ error_type: ErrorTracking::ProjectErrorTrackingSetting::SENTRY_API_ERROR_INVALID_SIZE
+ )
+ expect(subject).to have_received(:sentry_client)
+ expect(sentry_client).to have_received(:list_issues)
+ end
+ end
end
describe '#list_sentry_projects' do
diff --git a/spec/models/evidence_spec.rb b/spec/models/evidence_spec.rb
index 00788c2c391..8f534517fc1 100644
--- a/spec/models/evidence_spec.rb
+++ b/spec/models/evidence_spec.rb
@@ -27,7 +27,7 @@ describe Evidence do
let(:release) { create(:release, project: project, name: nil) }
it 'creates a valid JSON object' do
- expect(release.name).to be_nil
+ expect(release.name).to eq(release.tag)
expect(summary_json).to match_schema(schema_file)
end
end
diff --git a/spec/models/grafana_integration_spec.rb b/spec/models/grafana_integration_spec.rb
index f8973097a40..615865e17b9 100644
--- a/spec/models/grafana_integration_spec.rb
+++ b/spec/models/grafana_integration_spec.rb
@@ -34,5 +34,36 @@ describe GrafanaIntegration do
internal_url
).for(:grafana_url)
end
+
+ it 'disallows non-booleans in enabled column' do
+ is_expected.not_to allow_value(
+ nil
+ ).for(:enabled)
+ end
+
+ it 'allows booleans in enabled column' do
+ is_expected.to allow_value(
+ true,
+ false
+ ).for(:enabled)
+ end
+ end
+
+ describe '.client' do
+ subject(:grafana_integration) { create(:grafana_integration) }
+
+ context 'with grafana integration disabled' do
+ it 'returns a grafana client' do
+ expect(grafana_integration.client).to be_an_instance_of(::Grafana::Client)
+ end
+ end
+
+ context 'with grafana integration enabled' do
+ it 'returns nil' do
+ grafana_integration.update(enabled: false)
+
+ expect(grafana_integration.client).to be(nil)
+ end
+ end
end
end
diff --git a/spec/models/group_group_link_spec.rb b/spec/models/group_group_link_spec.rb
new file mode 100644
index 00000000000..e4ad5703a10
--- /dev/null
+++ b/spec/models/group_group_link_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GroupGroupLink do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:shared_group) { create(:group) }
+ let_it_be(:group_group_link) do
+ create(:group_group_link, shared_group: shared_group,
+ shared_with_group: group)
+ end
+
+ describe 'relations' do
+ it { is_expected.to belong_to(:shared_group) }
+ it { is_expected.to belong_to(:shared_with_group) }
+ end
+
+ describe 'validation' do
+ it { is_expected.to validate_presence_of(:shared_group) }
+
+ it do
+ is_expected.to(
+ validate_uniqueness_of(:shared_group_id)
+ .scoped_to(:shared_with_group_id)
+ .with_message('The group has already been shared with this group'))
+ end
+
+ it { is_expected.to validate_presence_of(:shared_with_group) }
+ it { is_expected.to validate_presence_of(:group_access) }
+
+ it do
+ is_expected.to(
+ validate_inclusion_of(:group_access).in_array(Gitlab::Access.values))
+ end
+ end
+end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 520421ac5e3..3fa9d71cc7d 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -525,6 +525,128 @@ describe Group do
it { expect(subject.parent).to be_kind_of(described_class) }
end
+ describe '#max_member_access_for_user' do
+ context 'group shared with another group' do
+ let(:parent_group_user) { create(:user) }
+ let(:group_user) { create(:user) }
+ let(:child_group_user) { create(:user) }
+
+ let_it_be(:group_parent) { create(:group, :private) }
+ let_it_be(:group) { create(:group, :private, parent: group_parent) }
+ let_it_be(:group_child) { create(:group, :private, parent: group) }
+
+ let_it_be(:shared_group_parent) { create(:group, :private) }
+ let_it_be(:shared_group) { create(:group, :private, parent: shared_group_parent) }
+ let_it_be(:shared_group_child) { create(:group, :private, parent: shared_group) }
+
+ before do
+ group_parent.add_owner(parent_group_user)
+ group.add_owner(group_user)
+ group_child.add_owner(child_group_user)
+
+ create(:group_group_link, { shared_with_group: group,
+ shared_group: shared_group,
+ group_access: GroupMember::DEVELOPER })
+ end
+
+ context 'when feature flag share_group_with_group is enabled' do
+ before do
+ stub_feature_flags(share_group_with_group: true)
+ end
+
+ context 'with user in the group' do
+ let(:user) { group_user }
+
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::DEVELOPER)
+ expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::DEVELOPER)
+ end
+ end
+
+ context 'with user in the parent group' do
+ let(:user) { parent_group_user }
+
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
+
+ context 'with user in the child group' do
+ let(:user) { child_group_user }
+
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
+ end
+
+ context 'when feature flag share_group_with_group is disabled' do
+ before do
+ stub_feature_flags(share_group_with_group: false)
+ end
+
+ context 'with user in the group' do
+ let(:user) { group_user }
+
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
+
+ context 'with user in the parent group' do
+ let(:user) { parent_group_user }
+
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
+
+ context 'with user in the child group' do
+ let(:user) { child_group_user }
+
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
+ end
+ end
+
+ context 'multiple groups shared with group' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group, :private) }
+ let(:shared_group_parent) { create(:group, :private) }
+ let(:shared_group) { create(:group, :private, parent: shared_group_parent) }
+
+ before do
+ stub_feature_flags(share_group_with_group: true)
+
+ group.add_owner(user)
+
+ create(:group_group_link, { shared_with_group: group,
+ shared_group: shared_group,
+ group_access: GroupMember::DEVELOPER })
+ create(:group_group_link, { shared_with_group: group,
+ shared_group: shared_group_parent,
+ group_access: GroupMember::MAINTAINER })
+ end
+
+ it 'returns correct access level' do
+ expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+ end
+
describe '#members_with_parents' do
let!(:group) { create(:group, :nested) }
let!(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) }
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index a4d202dc4f8..94f1b0cba2e 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -16,7 +16,7 @@ describe SystemHook do
end
end
- describe "execute" do
+ describe "execute", :sidekiq_might_not_need_inline do
let(:system_hook) { create(:system_hook) }
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 18a1a30eee5..0f78cb4d9b1 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -423,6 +423,19 @@ describe Issue do
issue = create(:issue, title: 'testing-issue', confidential: true)
expect(issue.to_branch_name).to match /confidential-issue\z/
end
+
+ context 'issue title longer than 100 characters' do
+ let(:issue) { create(:issue, iid: 999, title: 'Lorem ipsum dolor sit amet consectetur adipiscing elit Mauris sit amet ipsum id lacus custom fringilla convallis') }
+
+ it "truncates branch name to at most 100 characters" do
+ expect(issue.to_branch_name.length).to be <= 100
+ end
+
+ it "truncates dangling parts of the branch name" do
+ # 100 characters would've got us "999-lorem...lacus-custom-fri".
+ expect(issue.to_branch_name).to eq("999-lorem-ipsum-dolor-sit-amet-consectetur-adipiscing-elit-mauris-sit-amet-ipsum-id-lacus-custom")
+ end
+ end
end
describe '#can_be_worked_on?' do
diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb
index 47cae5cf197..44445429d3e 100644
--- a/spec/models/lfs_object_spec.rb
+++ b/spec/models/lfs_object_spec.rb
@@ -3,6 +3,18 @@
require 'spec_helper'
describe LfsObject do
+ context 'scopes' do
+ describe '.not_existing_in_project' do
+ it 'contains only lfs objects not linked to the project' do
+ project = create(:project)
+ create(:lfs_objects_project, project: project)
+ other_lfs_object = create(:lfs_object)
+
+ expect(described_class.not_linked_to_project(project)).to contain_exactly(other_lfs_object)
+ end
+ end
+ end
+
it 'has a distinct has_many :projects relation through lfs_objects_projects' do
lfs_object = create(:lfs_object)
project = create(:project)
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index b86663fd7d9..0f7f68e0b38 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -378,6 +378,14 @@ describe MergeRequestDiff do
expect(diff_with_commits.commit_shas).not_to be_empty
expect(diff_with_commits.commit_shas).to all(match(/\h{40}/))
end
+
+ context 'with limit attribute' do
+ it 'returns limited number of shas' do
+ expect(diff_with_commits.commit_shas(limit: 2).size).to eq(2)
+ expect(diff_with_commits.commit_shas(limit: 100).size).to eq(29)
+ expect(diff_with_commits.commit_shas.size).to eq(29)
+ end
+ end
end
describe '#compare_with' do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index ad79bee8801..b5aa05fd8b4 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -283,6 +283,16 @@ describe MergeRequest do
end
end
+ describe '.by_merge_commit_sha' do
+ it 'returns merge requests that match the given merge commit' do
+ mr = create(:merge_request, :merged, merge_commit_sha: '123abc')
+
+ create(:merge_request, :merged, merge_commit_sha: '123def')
+
+ expect(described_class.by_merge_commit_sha('123abc')).to eq([mr])
+ end
+ end
+
describe '.in_projects' do
it 'returns the merge requests for a set of projects' do
expect(described_class.in_projects(Project.all)).to eq([subject])
@@ -1190,7 +1200,7 @@ describe MergeRequest do
context 'diverged on fork' do
subject(:merge_request_fork_with_divergence) { create(:merge_request, :diverged, source_project: forked_project, target_project: project) }
- it 'counts commits that are on target branch but not on source branch' do
+ it 'counts commits that are on target branch but not on source branch', :sidekiq_might_not_need_inline do
expect(subject.diverged_commits_count).to eq(29)
end
end
@@ -1251,13 +1261,49 @@ describe MergeRequest do
end
describe '#commit_shas' do
- before do
- allow(subject.merge_request_diff).to receive(:commit_shas)
- .and_return(['sha1'])
+ context 'persisted merge request' do
+ context 'with a limit' do
+ it 'returns a limited number of commit shas' do
+ expect(subject.commit_shas(limit: 2)).to eq(%w[
+ b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6
+ ])
+ end
+ end
+
+ context 'without a limit' do
+ it 'returns all commit shas of the merge request diff' do
+ expect(subject.commit_shas.size).to eq(29)
+ end
+ end
end
- it 'delegates to merge request diff' do
- expect(subject.commit_shas).to eq ['sha1']
+ context 'new merge request' do
+ subject { build(:merge_request) }
+
+ context 'compare commits' do
+ before do
+ subject.compare_commits = [
+ double(sha: 'sha1'), double(sha: 'sha2')
+ ]
+ end
+
+ context 'without a limit' do
+ it 'returns all shas of compare commits' do
+ expect(subject.commit_shas).to eq(%w[sha2 sha1])
+ end
+ end
+
+ context 'with a limit' do
+ it 'returns a limited number of shas' do
+ expect(subject.commit_shas(limit: 1)).to eq(['sha2'])
+ end
+ end
+ end
+
+ it 'returns diff_head_sha as an array' do
+ expect(subject.commit_shas).to eq([subject.diff_head_sha])
+ expect(subject.commit_shas(limit: 2)).to eq([subject.diff_head_sha])
+ end
end
end
@@ -1674,6 +1720,63 @@ describe MergeRequest do
end
end
+ describe '#find_exposed_artifacts' do
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, :with_test_reports, source_project: project) }
+ let(:pipeline) { merge_request.head_pipeline }
+
+ subject { merge_request.find_exposed_artifacts }
+
+ context 'when head pipeline has exposed artifacts' do
+ let!(:job) do
+ create(:ci_build, options: { artifacts: { expose_as: 'artifact', paths: ['ci_artifacts.txt'] } }, pipeline: pipeline)
+ end
+
+ let!(:artifacts_metadata) { create(:ci_job_artifact, :metadata, job: job) }
+
+ context 'when reactive cache worker is parsing results asynchronously' do
+ it 'returns status' do
+ expect(subject[:status]).to eq(:parsing)
+ end
+ end
+
+ context 'when reactive cache worker is inline' do
+ before do
+ synchronous_reactive_cache(merge_request)
+ end
+
+ it 'returns status and data' do
+ expect(subject[:status]).to eq(:parsed)
+ end
+
+ context 'when an error occurs' do
+ before do
+ expect_next_instance_of(Ci::FindExposedArtifactsService) do |service|
+ expect(service).to receive(:for_pipeline)
+ .and_raise(StandardError.new)
+ end
+ end
+
+ it 'returns an error message' do
+ expect(subject[:status]).to eq(:error)
+ end
+ end
+
+ context 'when cached results is not latest' do
+ before do
+ allow_next_instance_of(Ci::GenerateExposedArtifactsReportService) do |service|
+ allow(service).to receive(:latest?).and_return(false)
+ end
+ end
+
+ it 'raises an InvalidateReactiveCache error' do
+ expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
+ end
+ end
+ end
+ end
+ end
+
describe '#compare_test_reports' do
subject { merge_request.compare_test_reports }
@@ -1831,7 +1934,7 @@ describe MergeRequest do
context 'when the MR has been merged' do
before do
MergeRequests::MergeService
- .new(subject.target_project, subject.author)
+ .new(subject.target_project, subject.author, { sha: subject.diff_head_sha })
.execute(subject)
end
@@ -2081,6 +2184,13 @@ describe MergeRequest do
expect { execute }.to raise_error(ActiveRecord::StaleObjectError)
end
+
+ it "raises ActiveRecord::LockWaitTimeout after 6 tries" do
+ expect(merge_request).to receive(:with_lock).exactly(6).times.and_raise(ActiveRecord::LockWaitTimeout)
+ expect(RebaseWorker).not_to receive(:perform_async)
+
+ expect { execute }.to raise_error(MergeRequest::RebaseLockTimeout)
+ end
end
describe '#mergeable?' do
@@ -2103,6 +2213,50 @@ describe MergeRequest do
end
end
+ describe '#check_mergeability' do
+ let(:mergeability_service) { double }
+
+ before do
+ allow(MergeRequests::MergeabilityCheckService).to receive(:new) do
+ mergeability_service
+ end
+ end
+
+ context 'if the merge status is unchecked' do
+ before do
+ subject.mark_as_unchecked!
+ end
+
+ it 'executes MergeabilityCheckService' do
+ expect(mergeability_service).to receive(:execute)
+
+ subject.check_mergeability
+ end
+ end
+
+ context 'if the merge status is checked' do
+ context 'and feature flag is enabled' do
+ it 'does not execute MergeabilityCheckService' do
+ expect(mergeability_service).not_to receive(:execute)
+
+ subject.check_mergeability
+ end
+ end
+
+ context 'and feature flag is disabled' do
+ before do
+ stub_feature_flags(merge_requests_conditional_mergeability_check: false)
+ end
+
+ it 'executes MergeabilityCheckService' do
+ expect(mergeability_service).to receive(:execute)
+
+ subject.check_mergeability
+ end
+ end
+ end
+ end
+
describe '#mergeable_state?' do
let(:project) { create(:project, :repository) }
@@ -2203,7 +2357,7 @@ describe MergeRequest do
allow(subject).to receive(:head_pipeline) { pipeline }
end
- it { expect(subject.mergeable_ci_state?).to be_truthy }
+ it { expect(subject.mergeable_ci_state?).to be_falsey }
end
context 'when no pipeline is associated' do
@@ -2327,7 +2481,7 @@ describe MergeRequest do
create(:deployment, :success, environment: source_environment, ref: 'feature', sha: merge_request.diff_head_sha)
end
- it 'selects deployed environments' do
+ it 'selects deployed environments', :sidekiq_might_not_need_inline do
expect(merge_request.environments_for(user)).to contain_exactly(source_environment)
end
@@ -2338,7 +2492,7 @@ describe MergeRequest do
create(:deployment, :success, environment: target_environment, tag: true, sha: merge_request.diff_head_sha)
end
- it 'selects deployed environments' do
+ it 'selects deployed environments', :sidekiq_might_not_need_inline do
expect(merge_request.environments_for(user)).to contain_exactly(source_environment, target_environment)
end
end
@@ -2689,7 +2843,7 @@ describe MergeRequest do
describe '#mergeable_with_quick_action?' do
def create_pipeline(status)
- pipeline = create(:ci_pipeline_with_one_job,
+ pipeline = create(:ci_pipeline,
project: project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha,
@@ -2804,9 +2958,9 @@ describe MergeRequest do
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
- let!(:first_pipeline) { create(:ci_pipeline_without_jobs, pipeline_arguments) }
- let!(:last_pipeline) { create(:ci_pipeline_without_jobs, pipeline_arguments) }
- let!(:last_pipeline_with_other_ref) { create(:ci_pipeline_without_jobs, pipeline_arguments.merge(ref: 'other')) }
+ let!(:first_pipeline) { create(:ci_pipeline, pipeline_arguments) }
+ let!(:last_pipeline) { create(:ci_pipeline, pipeline_arguments) }
+ let!(:last_pipeline_with_other_ref) { create(:ci_pipeline, pipeline_arguments.merge(ref: 'other')) }
it 'returns latest pipeline for the target branch' do
expect(merge_request.base_pipeline).to eq(last_pipeline)
@@ -2932,7 +3086,7 @@ describe MergeRequest do
describe '#unlock_mr' do
subject { create(:merge_request, state: 'locked', merge_jid: 123) }
- it 'updates merge request head pipeline and sets merge_jid to nil' do
+ it 'updates merge request head pipeline and sets merge_jid to nil', :sidekiq_might_not_need_inline do
pipeline = create(:ci_empty_pipeline, project: subject.project, ref: subject.source_branch, sha: subject.source_branch_sha)
subject.unlock_mr
@@ -3304,7 +3458,7 @@ describe MergeRequest do
end
end
- describe '.with_open_merge_when_pipeline_succeeds' do
+ describe '.with_auto_merge_enabled' do
let!(:project) { create(:project) }
let!(:fork) { fork_project(project) }
let!(:merge_request1) do
@@ -3316,15 +3470,6 @@ describe MergeRequest do
source_branch: 'feature-1')
end
- let!(:merge_request2) do
- create(:merge_request,
- :merge_when_pipeline_succeeds,
- target_project: project,
- target_branch: 'master',
- source_project: fork,
- source_branch: 'fork-feature-1')
- end
-
let!(:merge_request4) do
create(:merge_request,
target_project: project,
@@ -3333,10 +3478,73 @@ describe MergeRequest do
source_branch: 'fork-feature-2')
end
- let(:query) { described_class.with_open_merge_when_pipeline_succeeds }
+ let(:query) { described_class.with_auto_merge_enabled }
- it { expect(query).to contain_exactly(merge_request1, merge_request2) }
+ it { expect(query).to contain_exactly(merge_request1) }
end
it_behaves_like 'versioned description'
+
+ describe '#commits' do
+ context 'persisted merge request' do
+ context 'with a limit' do
+ it 'returns a limited number of commits' do
+ expect(subject.commits(limit: 2).map(&:sha)).to eq(%w[
+ b83d6e391c22777fca1ed3012fce84f633d7fed0
+ 498214de67004b1da3d820901307bed2a68a8ef6
+ ])
+ expect(subject.commits(limit: 3).map(&:sha)).to eq(%w[
+ b83d6e391c22777fca1ed3012fce84f633d7fed0
+ 498214de67004b1da3d820901307bed2a68a8ef6
+ 1b12f15a11fc6e62177bef08f47bc7b5ce50b141
+ ])
+ end
+ end
+
+ context 'without a limit' do
+ it 'returns all commits of the merge request diff' do
+ expect(subject.commits.size).to eq(29)
+ end
+ end
+ end
+
+ context 'new merge request' do
+ subject { build(:merge_request) }
+
+ context 'compare commits' do
+ let(:first_commit) { double }
+ let(:second_commit) { double }
+
+ before do
+ subject.compare_commits = [
+ first_commit, second_commit
+ ]
+ end
+
+ context 'without a limit' do
+ it 'returns all the compare commits' do
+ expect(subject.commits.to_a).to eq([second_commit, first_commit])
+ end
+ end
+
+ context 'with a limit' do
+ it 'returns a limited number of commits' do
+ expect(subject.commits(limit: 1).to_a).to eq([second_commit])
+ end
+ end
+ end
+ end
+ end
+
+ describe '#recent_commits' do
+ before do
+ stub_const("#{MergeRequestDiff}::COMMITS_SAFE_SIZE", 2)
+ end
+
+ it 'returns the safe number of commits' do
+ expect(subject.recent_commits.map(&:sha)).to eq(%w[
+ b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6
+ ])
+ end
+ end
end
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index 120ba67f328..45cd2768708 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -55,6 +55,17 @@ describe Milestone do
end
end
+ describe 'title' do
+ it { is_expected.to validate_presence_of(:title) }
+
+ it 'is invalid if title would be empty after sanitization' do
+ milestone = build(:milestone, project: project, title: '<img src=x onerror=prompt(1)>')
+
+ expect(milestone).not_to be_valid
+ expect(milestone.errors[:title]).to include("can't be blank")
+ end
+ end
+
describe 'milestone_releases' do
let(:milestone) { build(:milestone, project: project) }
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 1e06d0fd7b9..c93e6aafd75 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -281,6 +281,44 @@ describe Namespace do
end
end
+ shared_examples 'move_dir without repository storage feature' do |storage_version|
+ let(:namespace) { create(:namespace) }
+ let(:gitlab_shell) { namespace.gitlab_shell }
+ let!(:project) { create(:project_empty_repo, namespace: namespace, storage_version: storage_version) }
+
+ it 'calls namespace service' do
+ expect(gitlab_shell).to receive(:add_namespace).and_return(true)
+ expect(gitlab_shell).to receive(:mv_namespace).and_return(true)
+
+ namespace.move_dir
+ end
+ end
+
+ shared_examples 'move_dir with repository storage feature' do |storage_version|
+ let(:namespace) { create(:namespace) }
+ let(:gitlab_shell) { namespace.gitlab_shell }
+ let!(:project) { create(:project_empty_repo, namespace: namespace, storage_version: storage_version) }
+
+ it 'does not call namespace service' do
+ expect(gitlab_shell).not_to receive(:add_namespace)
+ expect(gitlab_shell).not_to receive(:mv_namespace)
+
+ namespace.move_dir
+ end
+ end
+
+ context 'project is without repository storage feature' do
+ [nil, 0].each do |storage_version|
+ it_behaves_like 'move_dir without repository storage feature', storage_version
+ end
+ end
+
+ context 'project has repository storage feature' do
+ [1, 2].each do |storage_version|
+ it_behaves_like 'move_dir with repository storage feature', storage_version
+ end
+ end
+
context 'with subgroups' do
let(:parent) { create(:group, name: 'parent', path: 'parent') }
let(:new_parent) { create(:group, name: 'new_parent', path: 'new_parent') }
diff --git a/spec/models/personal_snippet_spec.rb b/spec/models/personal_snippet_spec.rb
new file mode 100644
index 00000000000..276c8e22731
--- /dev/null
+++ b/spec/models/personal_snippet_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PersonalSnippet do
+ describe '#embeddable?' do
+ [
+ { snippet: :public, embeddable: true },
+ { snippet: :internal, embeddable: false },
+ { snippet: :private, embeddable: false }
+ ].each do |combination|
+ it 'only returns true when snippet is public' do
+ snippet = build(:personal_snippet, combination[:snippet])
+
+ expect(snippet.embeddable?).to eq(combination[:embeddable])
+ end
+ end
+ end
+end
diff --git a/spec/models/project_import_state_spec.rb b/spec/models/project_import_state_spec.rb
index 8a847bbe24e..0b4dcc62ff6 100644
--- a/spec/models/project_import_state_spec.rb
+++ b/spec/models/project_import_state_spec.rb
@@ -27,7 +27,7 @@ describe ProjectImportState, type: :model do
expect(project.wiki.repository).to receive(:after_import).and_call_original
end
- it 'imports a project' do
+ it 'imports a project', :sidekiq_might_not_need_inline do
expect(RepositoryImportWorker).to receive(:perform_async).and_call_original
expect { import_state.schedule }.to change { import_state.jid }
diff --git a/spec/models/project_services/chat_message/pipeline_message_spec.rb b/spec/models/project_services/chat_message/pipeline_message_spec.rb
index cf7c7bf7e61..366ef01924e 100644
--- a/spec/models/project_services/chat_message/pipeline_message_spec.rb
+++ b/spec/models/project_services/chat_message/pipeline_message_spec.rb
@@ -159,6 +159,45 @@ describe ChatMessage::PipelineMessage do
)
end
end
+
+ context 'when ref type is tag' do
+ before do
+ args[:object_attributes][:tag] = true
+ args[:object_attributes][:ref] = 'new_tag'
+ end
+
+ it "returns the pipeline summary in the activity's title" do
+ expect(subject.activity[:title]).to eq(
+ "Pipeline [#123](http://example.gitlab.com/pipelines/123)" \
+ " of tag [new_tag](http://example.gitlab.com/-/tags/new_tag)" \
+ " by The Hacker (hacker) passed"
+ )
+ end
+
+ it "returns the pipeline summary as the attachment's text property" do
+ expect(subject.attachments.first[:text]).to eq(
+ "<http://example.gitlab.com|project_name>:" \
+ " Pipeline <http://example.gitlab.com/pipelines/123|#123>" \
+ " of tag <http://example.gitlab.com/-/tags/new_tag|new_tag>" \
+ " by The Hacker (hacker) passed in 02:00:10"
+ )
+ end
+
+ context 'when rendering markdown' do
+ before do
+ args[:markdown] = true
+ end
+
+ it 'returns the pipeline summary as the attachments in markdown format' do
+ expect(subject.attachments).to eq(
+ "[project_name](http://example.gitlab.com):" \
+ " Pipeline [#123](http://example.gitlab.com/pipelines/123)" \
+ " of tag [new_tag](http://example.gitlab.com/-/tags/new_tag)" \
+ " by The Hacker (hacker) passed in 02:00:10"
+ )
+ end
+ end
+ end
end
context 'when the fancy_pipeline_slack_notifications feature flag is enabled' do
diff --git a/spec/models/project_services/chat_message/push_message_spec.rb b/spec/models/project_services/chat_message/push_message_spec.rb
index 2bde0b93fda..fe0b2fe3440 100644
--- a/spec/models/project_services/chat_message/push_message_spec.rb
+++ b/spec/models/project_services/chat_message/push_message_spec.rb
@@ -96,7 +96,7 @@ describe ChatMessage::PushMessage do
context 'without markdown' do
it 'returns a message regarding pushes' do
expect(subject.pretext).to eq('test.user pushed new tag ' \
- '<http://url.com/commits/new_tag|new_tag> to ' \
+ '<http://url.com/-/tags/new_tag|new_tag> to ' \
'<http://url.com|project_name>')
expect(subject.attachments).to be_empty
end
@@ -109,10 +109,10 @@ describe ChatMessage::PushMessage do
it 'returns a message regarding pushes' do
expect(subject.pretext).to eq(
- 'test.user pushed new tag [new_tag](http://url.com/commits/new_tag) to [project_name](http://url.com)')
+ 'test.user pushed new tag [new_tag](http://url.com/-/tags/new_tag) to [project_name](http://url.com)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq(
- title: 'test.user pushed new tag [new_tag](http://url.com/commits/new_tag)',
+ title: 'test.user pushed new tag [new_tag](http://url.com/-/tags/new_tag)',
subtitle: 'in [project_name](http://url.com)',
text: '[Compare changes](http://url.com/compare/0000000000000000000000000000000000000000...after)',
image: 'http://someavatar.com'
diff --git a/spec/models/project_services/data_fields_spec.rb b/spec/models/project_services/data_fields_spec.rb
index 146db0ae227..6b388a7222b 100644
--- a/spec/models/project_services/data_fields_spec.rb
+++ b/spec/models/project_services/data_fields_spec.rb
@@ -74,6 +74,12 @@ describe DataFields do
expect(service.url_changed?).to be_falsy
end
end
+
+ describe 'data_fields_present?' do
+ it 'returns true from the issue tracker service' do
+ expect(service.data_fields_present?).to be true
+ end
+ end
end
context 'when data are stored in data_fields' do
@@ -92,6 +98,18 @@ describe DataFields do
end
end
+ context 'when service and data_fields are not persisted' do
+ let(:service) do
+ JiraService.new
+ end
+
+ describe 'data_fields_present?' do
+ it 'returns true' do
+ expect(service.data_fields_present?).to be true
+ end
+ end
+ end
+
context 'when data are stored in properties' do
let(:service) { create(:jira_service, :without_properties_callback, properties: properties) }
diff --git a/spec/models/project_services/irker_service_spec.rb b/spec/models/project_services/irker_service_spec.rb
index 2e1f6964692..309dc51191b 100644
--- a/spec/models/project_services/irker_service_spec.rb
+++ b/spec/models/project_services/irker_service_spec.rb
@@ -60,7 +60,7 @@ describe IrkerService do
@irker_server.close
end
- it 'sends valid JSON messages to an Irker listener' do
+ it 'sends valid JSON messages to an Irker listener', :sidekiq_might_not_need_inline do
irker.execute(sample_data)
conn = @irker_server.accept
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index e5ac6ca65d6..bc22818ede7 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -65,6 +65,37 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
end
end
+
+ context 'with self-monitoring project and internal Prometheus' do
+ before do
+ service.api_url = 'http://localhost:9090'
+
+ stub_application_setting(instance_administration_project_id: project.id)
+ stub_config(prometheus: { enable: true, listen_address: 'localhost:9090' })
+ end
+
+ it 'allows self-monitoring project to connect to internal Prometheus' do
+ aggregate_failures do
+ ['127.0.0.1', '192.168.2.3'].each do |url|
+ allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([Addrinfo.tcp(url, 80)])
+
+ expect(service.can_query?).to be true
+ end
+ end
+ end
+
+ it 'does not allow self-monitoring project to connect to other local URLs' do
+ service.api_url = 'http://localhost:8000'
+
+ aggregate_failures do
+ ['127.0.0.1', '192.168.2.3'].each do |url|
+ allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([Addrinfo.tcp(url, 80)])
+
+ expect(service.can_query?).to be false
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/models/project_snippet_spec.rb b/spec/models/project_snippet_spec.rb
index e87b4f41f4d..46025507cb5 100644
--- a/spec/models/project_snippet_spec.rb
+++ b/spec/models/project_snippet_spec.rb
@@ -10,4 +10,25 @@ describe ProjectSnippet do
describe "Validation" do
it { is_expected.to validate_presence_of(:project) }
end
+
+ describe '#embeddable?' do
+ [
+ { project: :public, snippet: :public, embeddable: true },
+ { project: :internal, snippet: :public, embeddable: false },
+ { project: :private, snippet: :public, embeddable: false },
+ { project: :public, snippet: :internal, embeddable: false },
+ { project: :internal, snippet: :internal, embeddable: false },
+ { project: :private, snippet: :internal, embeddable: false },
+ { project: :public, snippet: :private, embeddable: false },
+ { project: :internal, snippet: :private, embeddable: false },
+ { project: :private, snippet: :private, embeddable: false }
+ ].each do |combination|
+ it 'only returns true when both project and snippet are public' do
+ project = create(:project, combination[:project])
+ snippet = build(:project_snippet, combination[:snippet], project: project)
+
+ expect(snippet.embeddable?).to eq(combination[:embeddable])
+ end
+ end
+ end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 1bda3094e75..815ab7aa166 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -2029,24 +2029,37 @@ describe Project do
end
describe '#ci_config_path=' do
- let(:project) { create(:project) }
+ using RSpec::Parameterized::TableSyntax
- it 'sets nil' do
- project.update!(ci_config_path: nil)
+ let(:project) { create(:project) }
- expect(project.ci_config_path).to be_nil
+ where(:default_ci_config_path, :project_ci_config_path, :expected_ci_config_path) do
+ nil | :notset | :default
+ nil | nil | :default
+ nil | '' | :default
+ nil | "cust\0om/\0/path" | 'custom//path'
+ '' | :notset | :default
+ '' | nil | :default
+ '' | '' | :default
+ '' | "cust\0om/\0/path" | 'custom//path'
+ 'global/path' | :notset | 'global/path'
+ 'global/path' | nil | :default
+ 'global/path' | '' | :default
+ 'global/path' | "cust\0om/\0/path" | 'custom//path'
end
- it 'sets a string' do
- project.update!(ci_config_path: 'foo/.gitlab_ci.yml')
-
- expect(project.ci_config_path).to eq('foo/.gitlab_ci.yml')
- end
+ with_them do
+ before do
+ stub_application_setting(default_ci_config_path: default_ci_config_path)
- it 'sets a string but removes all null characters' do
- project.update!(ci_config_path: "f\0oo/\0/.gitlab_ci.yml")
+ if project_ci_config_path != :notset
+ project.ci_config_path = project_ci_config_path
+ end
+ end
- expect(project.ci_config_path).to eq('foo//.gitlab_ci.yml')
+ it 'returns the correct path' do
+ expect(project.ci_config_path.presence || :default).to eq(expected_ci_config_path)
+ end
end
end
@@ -3342,22 +3355,6 @@ describe Project do
end
end
- describe '#append_or_update_attribute' do
- let(:project) { create(:project) }
-
- it 'shows full error updating an invalid MR' do
- expect { project.append_or_update_attribute(:merge_requests, [create(:merge_request)]) }
- .to raise_error(ActiveRecord::RecordInvalid, /Failed to set merge_requests:/)
- end
-
- it 'updates the project successfully' do
- merge_request = create(:merge_request, target_project: project, source_project: project)
-
- expect { project.append_or_update_attribute(:merge_requests, [merge_request]) }
- .not_to raise_error
- end
- end
-
describe '#update' do
let(:project) { create(:project) }
@@ -4284,22 +4281,25 @@ describe Project do
describe '#check_repository_path_availability' do
let(:project) { build(:project, :repository, :legacy_storage) }
- subject { project.check_repository_path_availability }
context 'when the repository already exists' do
let(:project) { create(:project, :repository, :legacy_storage) }
- it { is_expected.to be_falsey }
+ it 'returns false when repository already exists' do
+ expect(project.check_repository_path_availability).to be_falsey
+ end
end
context 'when the repository does not exist' do
- it { is_expected.to be_truthy }
+ it 'returns true when repository does not exist' do
+ expect(project.check_repository_path_availability).to be_truthy
+ end
it 'skips gitlab-shell exists?' do
project.skip_disk_validation = true
expect(project.gitlab_shell).not_to receive(:repository_exists?)
- is_expected.to be_truthy
+ expect(project.check_repository_path_availability).to be_truthy
end
end
end
@@ -4631,7 +4631,7 @@ describe Project do
end
describe '#any_branch_allows_collaboration?' do
- it 'allows access when there are merge requests open allowing collaboration' do
+ it 'allows access when there are merge requests open allowing collaboration', :sidekiq_might_not_need_inline do
expect(project.any_branch_allows_collaboration?(user))
.to be_truthy
end
@@ -4645,7 +4645,7 @@ describe Project do
end
describe '#branch_allows_collaboration?' do
- it 'allows access if the user can merge the merge request' do
+ it 'allows access if the user can merge the merge request', :sidekiq_might_not_need_inline do
expect(project.branch_allows_collaboration?(user, 'awesome-feature-1'))
.to be_truthy
end
@@ -4899,20 +4899,6 @@ describe Project do
end
end
- describe '.find_without_deleted' do
- it 'returns nil if the project is about to be removed' do
- project = create(:project, pending_delete: true)
-
- expect(described_class.find_without_deleted(project.id)).to be_nil
- end
-
- it 'returns a project when it is not about to be removed' do
- project = create(:project)
-
- expect(described_class.find_without_deleted(project.id)).to eq(project)
- end
- end
-
describe '.for_group' do
it 'returns the projects for a given group' do
group = create(:group)
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 0aac325c2b2..f9c7a14f1f3 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe Release do
expect(existing_release_without_name).to be_valid
expect(existing_release_without_name.description).to eq("change")
- expect(existing_release_without_name.name).to be_nil
+ expect(existing_release_without_name.name).not_to be_nil
end
end
@@ -57,14 +57,14 @@ RSpec.describe Release do
subject { release.assets_count }
it 'returns the number of sources' do
- is_expected.to eq(Releases::Source::FORMATS.count)
+ is_expected.to eq(Gitlab::Workhorse::ARCHIVE_FORMATS.count)
end
context 'when a links exists' do
let!(:link) { create(:release_link, release: release) }
it 'counts the link as an asset' do
- is_expected.to eq(1 + Releases::Source::FORMATS.count)
+ is_expected.to eq(1 + Gitlab::Workhorse::ARCHIVE_FORMATS.count)
end
it "excludes sources count when asked" do
@@ -92,7 +92,7 @@ RSpec.describe Release do
end
end
- describe 'evidence' do
+ describe 'evidence', :sidekiq_might_not_need_inline do
describe '#create_evidence!' do
context 'when a release is created' do
it 'creates one Evidence object too' do
@@ -129,4 +129,16 @@ RSpec.describe Release do
end
end
end
+
+ describe '#name' do
+ context 'name is nil' do
+ before do
+ release.update(name: nil)
+ end
+
+ it 'returns tag' do
+ expect(release.name).to eq(release.tag)
+ end
+ end
+ end
end
diff --git a/spec/models/releases/source_spec.rb b/spec/models/releases/source_spec.rb
index c5213196962..c8ac8e31c97 100644
--- a/spec/models/releases/source_spec.rb
+++ b/spec/models/releases/source_spec.rb
@@ -11,7 +11,7 @@ describe Releases::Source do
it 'returns all formats of sources' do
expect(subject.map(&:format))
- .to match_array(described_class::FORMATS)
+ .to match_array(Gitlab::Workhorse::ARCHIVE_FORMATS)
end
end
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index 63d0bf3f314..79d45da8a1e 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -189,7 +189,7 @@ describe RemoteMirror, :mailer do
remote_mirror.project.add_maintainer(user)
end
- it 'notifies the project maintainers' do
+ it 'notifies the project maintainers', :sidekiq_might_not_need_inline do
perform_enqueued_jobs { subject }
should_email(user)
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index 64077b76f01..f58bcbebd67 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -15,6 +15,26 @@ describe Service do
end
describe 'Scopes' do
+ describe '.by_type' do
+ let!(:service1) { create(:jira_service) }
+ let!(:service2) { create(:jira_service) }
+ let!(:service3) { create(:redmine_service) }
+
+ subject { described_class.by_type(type) }
+
+ context 'when type is "JiraService"' do
+ let(:type) { 'JiraService' }
+
+ it { is_expected.to match_array([service1, service2]) }
+ end
+
+ context 'when type is "RedmineService"' do
+ let(:type) { 'RedmineService' }
+
+ it { is_expected.to match_array([service3]) }
+ end
+ end
+
describe '.confidential_note_hooks' do
it 'includes services where confidential_note_events is true' do
create(:service, active: true, confidential_note_events: true)
diff --git a/spec/models/shard_spec.rb b/spec/models/shard_spec.rb
index 83104711b55..4da86858b54 100644
--- a/spec/models/shard_spec.rb
+++ b/spec/models/shard_spec.rb
@@ -1,4 +1,5 @@
-# frozen_string_literals: true
+# frozen_string_literal: true
+
require 'spec_helper'
describe Shard do
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index f4dcbfbc190..e4cc8931840 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -451,41 +451,4 @@ describe Snippet do
expect(blob.data).to eq(snippet.content)
end
end
-
- describe '#embeddable?' do
- context 'project snippet' do
- [
- { project: :public, snippet: :public, embeddable: true },
- { project: :internal, snippet: :public, embeddable: false },
- { project: :private, snippet: :public, embeddable: false },
- { project: :public, snippet: :internal, embeddable: false },
- { project: :internal, snippet: :internal, embeddable: false },
- { project: :private, snippet: :internal, embeddable: false },
- { project: :public, snippet: :private, embeddable: false },
- { project: :internal, snippet: :private, embeddable: false },
- { project: :private, snippet: :private, embeddable: false }
- ].each do |combination|
- it 'only returns true when both project and snippet are public' do
- project = create(:project, combination[:project])
- snippet = create(:project_snippet, combination[:snippet], project: project)
-
- expect(snippet.embeddable?).to eq(combination[:embeddable])
- end
- end
- end
-
- context 'personal snippet' do
- [
- { snippet: :public, embeddable: true },
- { snippet: :internal, embeddable: false },
- { snippet: :private, embeddable: false }
- ].each do |combination|
- it 'only returns true when snippet is public' do
- snippet = create(:personal_snippet, combination[:snippet])
-
- expect(snippet.embeddable?).to eq(combination[:embeddable])
- end
- end
- end
- end
end
diff --git a/spec/models/spam_log_spec.rb b/spec/models/spam_log_spec.rb
index e9ea234f75d..f4e073dc38f 100644
--- a/spec/models/spam_log_spec.rb
+++ b/spec/models/spam_log_spec.rb
@@ -20,7 +20,7 @@ describe SpamLog do
expect { spam_log.remove_user(deleted_by: admin) }.to change { spam_log.user.blocked? }.to(true)
end
- it 'removes the user' do
+ it 'removes the user', :sidekiq_might_not_need_inline do
spam_log = build(:spam_log)
user = spam_log.user
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index 487a1c619c6..ea09c6caed3 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -150,6 +150,19 @@ describe Todo do
end
end
+ describe '#done?' do
+ let_it_be(:todo1) { create(:todo, state: :pending) }
+ let_it_be(:todo2) { create(:todo, state: :done) }
+
+ it 'returns true for todos with done state' do
+ expect(todo2.done?).to be_truthy
+ end
+
+ it 'returns false for todos with state pending' do
+ expect(todo1.done?).to be_falsey
+ end
+ end
+
describe '#self_assigned?' do
let(:user_1) { build(:user) }
@@ -208,6 +221,40 @@ describe Todo do
expect(described_class.for_project(project1)).to eq([todo])
end
+
+ it 'returns the todos for many projects' do
+ project1 = create(:project)
+ project2 = create(:project)
+ project3 = create(:project)
+
+ todo1 = create(:todo, project: project1)
+ todo2 = create(:todo, project: project2)
+ create(:todo, project: project3)
+
+ expect(described_class.for_project([project2, project1])).to contain_exactly(todo2, todo1)
+ end
+ end
+
+ describe '.for_undeleted_projects' do
+ let(:project1) { create(:project) }
+ let(:project2) { create(:project) }
+ let(:project3) { create(:project) }
+
+ let!(:todo1) { create(:todo, project: project1) }
+ let!(:todo2) { create(:todo, project: project2) }
+ let!(:todo3) { create(:todo, project: project3) }
+
+ it 'returns the todos for a given project' do
+ expect(described_class.for_undeleted_projects).to contain_exactly(todo1, todo2, todo3)
+ end
+
+ context 'when todo belongs to deleted project' do
+ let(:project2) { create(:project, pending_delete: true) }
+
+ it 'excludes todos of deleted projects' do
+ expect(described_class.for_undeleted_projects).to contain_exactly(todo1, todo3)
+ end
+ end
end
describe '.for_group' do
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 8eb2f9b5bc0..ee7edb1516c 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe User do
+describe User, :do_not_mock_admin_mode do
include ProjectForksHelper
include TermsHelper
@@ -2797,10 +2797,26 @@ describe User do
expect(user.full_private_access?).to be_falsy
end
- it 'returns true for admin user' do
- user = build(:user, :admin)
+ context 'for admin user' do
+ include_context 'custom session'
- expect(user.full_private_access?).to be_truthy
+ let(:user) { build(:user, :admin) }
+
+ context 'when admin mode is disabled' do
+ it 'returns false' do
+ expect(user.full_private_access?).to be_falsy
+ end
+ end
+
+ context 'when admin mode is enabled' do
+ before do
+ Gitlab::Auth::CurrentUserMode.new(user).enable_admin_mode!(password: user.password)
+ end
+
+ it 'returns true' do
+ expect(user.full_private_access?).to be_truthy
+ end
+ end
end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index 9014276dcf8..a7c28519c5a 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -563,17 +563,6 @@ describe WikiPage do
end
end
- describe '#formatted_content' do
- it 'returns processed content of the page' do
- subject.create({ title: "RDoc", content: "*bold*", format: "rdoc" })
- page = wiki.find_page('RDoc')
-
- expect(page.formatted_content).to eq("\n<p><strong>bold</strong></p>\n")
-
- destroy_page('RDoc')
- end
- end
-
describe '#hook_attrs' do
it 'adds absolute urls for images in the content' do
create_page("test page", "test![WikiPage_Image](/uploads/abc/WikiPage_Image.png)")
diff --git a/spec/models/zoom_meeting_spec.rb b/spec/models/zoom_meeting_spec.rb
new file mode 100644
index 00000000000..3dad957a1ce
--- /dev/null
+++ b/spec/models/zoom_meeting_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ZoomMeeting do
+ let(:project) { build(:project) }
+
+ describe 'Factory' do
+ subject { build(:zoom_meeting) }
+
+ it { is_expected.to be_valid }
+ end
+
+ describe 'Associations' do
+ it { is_expected.to belong_to(:project).required }
+ it { is_expected.to belong_to(:issue).required }
+ end
+
+ describe 'scopes' do
+ let(:issue) { create(:issue, project: project) }
+ let!(:added_meeting) { create(:zoom_meeting, :added_to_issue, issue: issue) }
+ let!(:removed_meeting) { create(:zoom_meeting, :removed_from_issue, issue: issue) }
+
+ describe '.added_to_issue' do
+ it 'gets only added meetings' do
+ meetings_added = described_class.added_to_issue.pluck(:id)
+
+ expect(meetings_added).to include(added_meeting.id)
+ expect(meetings_added).not_to include(removed_meeting.id)
+ end
+ end
+ describe '.removed_from_issue' do
+ it 'gets only removed meetings' do
+ meetings_removed = described_class.removed_from_issue.pluck(:id)
+
+ expect(meetings_removed).to include(removed_meeting.id)
+ expect(meetings_removed).not_to include(added_meeting.id)
+ end
+ end
+ end
+
+ describe 'Validations' do
+ describe 'url' do
+ it { is_expected.to validate_presence_of(:url) }
+ it { is_expected.to validate_length_of(:url).is_at_most(255) }
+
+ shared_examples 'invalid Zoom URL' do
+ it do
+ expect(subject).to be_invalid
+ expect(subject.errors[:url])
+ .to contain_exactly('must contain one valid Zoom URL')
+ end
+ end
+
+ context 'with non-Zoom URL' do
+ before do
+ subject.url = %{https://non-zoom.url}
+ end
+
+ include_examples 'invalid Zoom URL'
+ end
+
+ context 'with multiple Zoom-URLs' do
+ before do
+ subject.url = %{https://zoom.us/j/123 https://zoom.us/j/456}
+ end
+
+ include_examples 'invalid Zoom URL'
+ end
+ end
+
+ describe 'issue association' do
+ let(:issue) { build(:issue, project: project) }
+
+ subject { build(:zoom_meeting, project: project, issue: issue) }
+
+ context 'for the same project' do
+ it { is_expected.to be_valid }
+ end
+
+ context 'for a different project' do
+ let(:issue) { build(:issue) }
+
+ it do
+ expect(subject).to be_invalid
+ expect(subject.errors[:issue])
+ .to contain_exactly('must associate the same project')
+ end
+ end
+ end
+ end
+
+ describe 'limit number of meetings per issue' do
+ shared_examples 'can add meetings' do
+ it 'can add new Zoom meetings' do
+ create(:zoom_meeting, :added_to_issue, issue: issue)
+ end
+ end
+
+ shared_examples 'can remove meetings' do
+ it 'can remove Zoom meetings' do
+ create(:zoom_meeting, :removed_from_issue, issue: issue)
+ end
+ end
+
+ shared_examples 'cannot add meetings' do
+ it 'fails to add a new meeting' do
+ expect do
+ create(:zoom_meeting, :added_to_issue, issue: issue)
+ end.to raise_error ActiveRecord::RecordNotUnique
+ end
+ end
+
+ let(:issue) { create(:issue, project: project) }
+
+ context 'without meetings' do
+ it_behaves_like 'can add meetings'
+ end
+
+ context 'when no other meeting is added' do
+ before do
+ create(:zoom_meeting, :removed_from_issue, issue: issue)
+ end
+
+ it_behaves_like 'can add meetings'
+ end
+
+ context 'when meeting is added' do
+ before do
+ create(:zoom_meeting, :added_to_issue, issue: issue)
+ end
+
+ it_behaves_like 'cannot add meetings'
+ end
+
+ context 'when meeting is added to another issue' do
+ let(:another_issue) { create(:issue, project: project) }
+
+ before do
+ create(:zoom_meeting, :added_to_issue, issue: another_issue)
+ end
+
+ it_behaves_like 'can add meetings'
+ end
+
+ context 'when second meeting is removed' do
+ before do
+ create(:zoom_meeting, :removed_from_issue, issue: issue)
+ end
+
+ it_behaves_like 'can remove meetings'
+ end
+ end
+end
diff --git a/spec/policies/application_setting/term_policy_spec.rb b/spec/policies/application_setting/term_policy_spec.rb
index 93b5ebf5f72..21690d4b457 100644
--- a/spec/policies/application_setting/term_policy_spec.rb
+++ b/spec/policies/application_setting/term_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe ApplicationSetting::TermPolicy do
diff --git a/spec/policies/base_policy_spec.rb b/spec/policies/base_policy_spec.rb
index 09be831dcd5..81aee4cfcac 100644
--- a/spec/policies/base_policy_spec.rb
+++ b/spec/policies/base_policy_spec.rb
@@ -1,7 +1,10 @@
+# frozen_string_literal: true
+
require 'spec_helper'
-describe BasePolicy do
+describe BasePolicy, :do_not_mock_admin_mode do
include ExternalAuthorizationServiceHelpers
+ include AdminModeHelper
describe '.class_for' do
it 'detects policy class based on the subject ancestors' do
@@ -34,8 +37,42 @@ describe BasePolicy do
it { is_expected.not_to be_allowed(:read_cross_project) }
- it 'allows admins' do
- expect(described_class.new(build(:admin), nil)).to be_allowed(:read_cross_project)
+ context 'for admins' do
+ let(:current_user) { build(:admin) }
+
+ subject { described_class.new(current_user, nil) }
+
+ it 'allowed when in admin mode' do
+ enable_admin_mode!(current_user)
+
+ is_expected.to be_allowed(:read_cross_project)
+ end
+
+ it 'prevented when not in admin mode' do
+ is_expected.not_to be_allowed(:read_cross_project)
+ end
+ end
+ end
+ end
+
+ describe 'full private access' do
+ let(:current_user) { create(:user) }
+
+ subject { described_class.new(current_user, nil) }
+
+ it { is_expected.not_to be_allowed(:read_all_resources) }
+
+ context 'for admins' do
+ let(:current_user) { build(:admin) }
+
+ it 'allowed when in admin mode' do
+ enable_admin_mode!(current_user)
+
+ is_expected.to be_allowed(:read_all_resources)
+ end
+
+ it 'prevented when not in admin mode' do
+ is_expected.not_to be_allowed(:read_all_resources)
end
end
end
diff --git a/spec/policies/ci/build_policy_spec.rb b/spec/policies/ci/build_policy_spec.rb
index 79a616899fa..333f4e560cf 100644
--- a/spec/policies/ci/build_policy_spec.rb
+++ b/spec/policies/ci/build_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::BuildPolicy do
diff --git a/spec/policies/ci/pipeline_policy_spec.rb b/spec/policies/ci/pipeline_policy_spec.rb
index 126d44d1860..293fe1fc5b9 100644
--- a/spec/policies/ci/pipeline_policy_spec.rb
+++ b/spec/policies/ci/pipeline_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::PipelinePolicy, :models do
diff --git a/spec/policies/ci/pipeline_schedule_policy_spec.rb b/spec/policies/ci/pipeline_schedule_policy_spec.rb
index 5a56e91cd69..700d7d1af0a 100644
--- a/spec/policies/ci/pipeline_schedule_policy_spec.rb
+++ b/spec/policies/ci/pipeline_schedule_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::PipelineSchedulePolicy, :models do
diff --git a/spec/policies/ci/trigger_policy_spec.rb b/spec/policies/ci/trigger_policy_spec.rb
index e9a85890082..e936277a391 100644
--- a/spec/policies/ci/trigger_policy_spec.rb
+++ b/spec/policies/ci/trigger_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::TriggerPolicy do
diff --git a/spec/policies/clusters/cluster_policy_spec.rb b/spec/policies/clusters/cluster_policy_spec.rb
index cc3dde154dc..55c3351a171 100644
--- a/spec/policies/clusters/cluster_policy_spec.rb
+++ b/spec/policies/clusters/cluster_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Clusters::ClusterPolicy, :models do
diff --git a/spec/policies/deploy_key_policy_spec.rb b/spec/policies/deploy_key_policy_spec.rb
index e7263d49613..aca93d8fe85 100644
--- a/spec/policies/deploy_key_policy_spec.rb
+++ b/spec/policies/deploy_key_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe DeployKeyPolicy do
diff --git a/spec/policies/deploy_token_policy_spec.rb b/spec/policies/deploy_token_policy_spec.rb
index cef5a4a22bc..43e23ee55ac 100644
--- a/spec/policies/deploy_token_policy_spec.rb
+++ b/spec/policies/deploy_token_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe DeployTokenPolicy do
diff --git a/spec/policies/environment_policy_spec.rb b/spec/policies/environment_policy_spec.rb
index 0442b032e89..3d0f250740c 100644
--- a/spec/policies/environment_policy_spec.rb
+++ b/spec/policies/environment_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe EnvironmentPolicy do
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 880f1bcbc05..c18cc245468 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe GlobalPolicy do
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index aeb09c1dc3a..ae9d125f970 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe GroupPolicy do
diff --git a/spec/policies/issuable_policy_spec.rb b/spec/policies/issuable_policy_spec.rb
index 6d34b0a8b4b..18e35308ecd 100644
--- a/spec/policies/issuable_policy_spec.rb
+++ b/spec/policies/issuable_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe IssuablePolicy, models: true do
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 25267d36ab8..89fcf3c10df 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe IssuePolicy do
diff --git a/spec/policies/merge_request_policy_spec.rb b/spec/policies/merge_request_policy_spec.rb
index af4c9703eb4..287325e96df 100644
--- a/spec/policies/merge_request_policy_spec.rb
+++ b/spec/policies/merge_request_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe MergeRequestPolicy do
diff --git a/spec/policies/namespace_policy_spec.rb b/spec/policies/namespace_policy_spec.rb
index 909c17fe8b5..c0a5119c550 100644
--- a/spec/policies/namespace_policy_spec.rb
+++ b/spec/policies/namespace_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe NamespacePolicy do
diff --git a/spec/policies/note_policy_spec.rb b/spec/policies/note_policy_spec.rb
index d18ded8bce9..5aee66275d4 100644
--- a/spec/policies/note_policy_spec.rb
+++ b/spec/policies/note_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe NotePolicy do
diff --git a/spec/policies/personal_snippet_policy_spec.rb b/spec/policies/personal_snippet_policy_spec.rb
index 097000ceb6a..36b4ac16cf0 100644
--- a/spec/policies/personal_snippet_policy_spec.rb
+++ b/spec/policies/personal_snippet_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
# Snippet visibility scenarios are included in more details in spec/support/snippet_visibility.rb
@@ -18,6 +20,19 @@ describe PersonalSnippetPolicy do
described_class.new(user, snippet)
end
+ shared_examples 'admin access' do
+ context 'admin user' do
+ subject { permissions(admin_user) }
+
+ it do
+ is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:create_note)
+ is_expected.to be_allowed(:award_emoji)
+ is_expected.to be_allowed(*author_permissions)
+ end
+ end
+ end
+
context 'public snippet' do
let(:snippet) { create(:personal_snippet, :public) }
@@ -53,6 +68,8 @@ describe PersonalSnippetPolicy do
is_expected.to be_allowed(*author_permissions)
end
end
+
+ it_behaves_like 'admin access'
end
context 'internal snippet' do
@@ -101,6 +118,8 @@ describe PersonalSnippetPolicy do
is_expected.to be_allowed(*author_permissions)
end
end
+
+ it_behaves_like 'admin access'
end
context 'private snippet' do
@@ -128,17 +147,6 @@ describe PersonalSnippetPolicy do
end
end
- context 'admin user' do
- subject { permissions(admin_user) }
-
- it do
- is_expected.to be_allowed(:read_personal_snippet)
- is_expected.to be_disallowed(:create_note)
- is_expected.to be_disallowed(:award_emoji)
- is_expected.to be_disallowed(*author_permissions)
- end
- end
-
context 'external user' do
subject { permissions(external_user) }
@@ -160,5 +168,7 @@ describe PersonalSnippetPolicy do
is_expected.to be_allowed(*author_permissions)
end
end
+
+ it_behaves_like 'admin access'
end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index e61a064e82c..ab54d97f2a2 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe ProjectPolicy do
@@ -313,6 +315,31 @@ describe ProjectPolicy do
end
end
+ context 'pipeline feature' do
+ let(:project) { create(:project) }
+
+ describe 'for unconfirmed user' do
+ let(:unconfirmed_user) { create(:user, confirmed_at: nil) }
+ subject { described_class.new(unconfirmed_user, project) }
+
+ it 'disallows to modify pipelines' do
+ expect_disallowed(:create_pipeline)
+ expect_disallowed(:update_pipeline)
+ expect_disallowed(:create_pipeline_schedule)
+ end
+ end
+
+ describe 'for confirmed user' do
+ subject { described_class.new(developer, project) }
+
+ it 'allows modify pipelines' do
+ expect_allowed(:create_pipeline)
+ expect_allowed(:update_pipeline)
+ expect_allowed(:create_pipeline_schedule)
+ end
+ end
+ end
+
context 'builds feature' do
context 'when builds are disabled' do
subject { described_class.new(owner, project) }
diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb
index 2e9ef1e89fd..3c68d33b1f3 100644
--- a/spec/policies/project_snippet_policy_spec.rb
+++ b/spec/policies/project_snippet_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
# Snippet visibility scenarios are included in more details in spec/support/snippet_visibility.rb
diff --git a/spec/policies/protected_branch_policy_spec.rb b/spec/policies/protected_branch_policy_spec.rb
index 1587196754d..ea7fd093e38 100644
--- a/spec/policies/protected_branch_policy_spec.rb
+++ b/spec/policies/protected_branch_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe ProtectedBranchPolicy do
diff --git a/spec/policies/resource_label_event_policy_spec.rb b/spec/policies/resource_label_event_policy_spec.rb
index 9206640ea00..799534d2b08 100644
--- a/spec/policies/resource_label_event_policy_spec.rb
+++ b/spec/policies/resource_label_event_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe ResourceLabelEventPolicy do
diff --git a/spec/policies/user_policy_spec.rb b/spec/policies/user_policy_spec.rb
index 7e0a1824200..9da9d2ce49b 100644
--- a/spec/policies/user_policy_spec.rb
+++ b/spec/policies/user_policy_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe UserPolicy do
diff --git a/spec/presenters/ci/bridge_presenter_spec.rb b/spec/presenters/ci/bridge_presenter_spec.rb
index 986818a7b9e..1c2eeced20c 100644
--- a/spec/presenters/ci/bridge_presenter_spec.rb
+++ b/spec/presenters/ci/bridge_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::BridgePresenter do
diff --git a/spec/presenters/ci/build_presenter_spec.rb b/spec/presenters/ci/build_presenter_spec.rb
index e202f7a9b5f..b6c47f40ceb 100644
--- a/spec/presenters/ci/build_presenter_spec.rb
+++ b/spec/presenters/ci/build_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::BuildPresenter do
@@ -267,7 +269,7 @@ describe Ci::BuildPresenter do
let(:build) { create(:ci_build, :failed, :script_failure) }
context 'when is a script or missing dependency failure' do
- let(:failure_reasons) { %w(script_failure missing_dependency_failure archived_failure) }
+ let(:failure_reasons) { %w(script_failure missing_dependency_failure archived_failure scheduler_failure data_integrity_failure) }
it 'returns false' do
failure_reasons.each do |failure_reason|
diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb
index fa8791f2257..017e94d04f1 100644
--- a/spec/presenters/ci/build_runner_presenter_spec.rb
+++ b/spec/presenters/ci/build_runner_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::BuildRunnerPresenter do
diff --git a/spec/presenters/ci/group_variable_presenter_spec.rb b/spec/presenters/ci/group_variable_presenter_spec.rb
index cb58a757564..3b81a425f5b 100644
--- a/spec/presenters/ci/group_variable_presenter_spec.rb
+++ b/spec/presenters/ci/group_variable_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::GroupVariablePresenter do
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index 8cfcd9befb3..eca5d3e05fe 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::PipelinePresenter do
diff --git a/spec/presenters/ci/trigger_presenter_spec.rb b/spec/presenters/ci/trigger_presenter_spec.rb
index 231b539c188..ac3967f4f77 100644
--- a/spec/presenters/ci/trigger_presenter_spec.rb
+++ b/spec/presenters/ci/trigger_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::TriggerPresenter do
diff --git a/spec/presenters/ci/variable_presenter_spec.rb b/spec/presenters/ci/variable_presenter_spec.rb
index e3ce88372ea..70cf2f539b6 100644
--- a/spec/presenters/ci/variable_presenter_spec.rb
+++ b/spec/presenters/ci/variable_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Ci::VariablePresenter do
diff --git a/spec/presenters/clusters/cluster_presenter_spec.rb b/spec/presenters/clusters/cluster_presenter_spec.rb
index 6b988e2645b..8bc5374f2db 100644
--- a/spec/presenters/clusters/cluster_presenter_spec.rb
+++ b/spec/presenters/clusters/cluster_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Clusters::ClusterPresenter do
diff --git a/spec/presenters/commit_status_presenter_spec.rb b/spec/presenters/commit_status_presenter_spec.rb
index 2b7742ddbb8..b02497d4c11 100644
--- a/spec/presenters/commit_status_presenter_spec.rb
+++ b/spec/presenters/commit_status_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe CommitStatusPresenter do
diff --git a/spec/presenters/conversational_development_index/metric_presenter_spec.rb b/spec/presenters/conversational_development_index/metric_presenter_spec.rb
index b8b68a676e6..ac18d5203e5 100644
--- a/spec/presenters/conversational_development_index/metric_presenter_spec.rb
+++ b/spec/presenters/conversational_development_index/metric_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe ConversationalDevelopmentIndex::MetricPresenter do
diff --git a/spec/presenters/group_clusterable_presenter_spec.rb b/spec/presenters/group_clusterable_presenter_spec.rb
index fa77273f6aa..11a8decc9cc 100644
--- a/spec/presenters/group_clusterable_presenter_spec.rb
+++ b/spec/presenters/group_clusterable_presenter_spec.rb
@@ -43,6 +43,12 @@ describe GroupClusterablePresenter do
it { is_expected.to eq(new_group_cluster_path(group)) }
end
+ describe '#authorize_aws_role_path' do
+ subject { presenter.authorize_aws_role_path }
+
+ it { is_expected.to eq(authorize_aws_role_group_clusters_path(group)) }
+ end
+
describe '#create_user_clusters_path' do
subject { presenter.create_user_clusters_path }
diff --git a/spec/presenters/group_member_presenter_spec.rb b/spec/presenters/group_member_presenter_spec.rb
index bb66523a83d..382b1881ab7 100644
--- a/spec/presenters/group_member_presenter_spec.rb
+++ b/spec/presenters/group_member_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe GroupMemberPresenter do
diff --git a/spec/presenters/instance_clusterable_presenter_spec.rb b/spec/presenters/instance_clusterable_presenter_spec.rb
new file mode 100644
index 00000000000..9f1268379f5
--- /dev/null
+++ b/spec/presenters/instance_clusterable_presenter_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe InstanceClusterablePresenter do
+ include Gitlab::Routing.url_helpers
+
+ let(:presenter) { described_class.new(instance) }
+ let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
+ let(:instance) { cluster.instance }
+
+ describe '#create_aws_clusters_path' do
+ subject { described_class.new(instance).create_aws_clusters_path }
+
+ it { is_expected.to eq(create_aws_admin_clusters_path) }
+ end
+
+ describe '#authorize_aws_role_path' do
+ subject { described_class.new(instance).authorize_aws_role_path }
+
+ it { is_expected.to eq(authorize_aws_role_admin_clusters_path) }
+ end
+
+ describe '#revoke_aws_role_path' do
+ subject { described_class.new(instance).revoke_aws_role_path }
+
+ it { is_expected.to eq(revoke_aws_role_admin_clusters_path) }
+ end
+
+ describe '#aws_api_proxy_path' do
+ let(:resource) { 'resource' }
+
+ subject { described_class.new(instance).aws_api_proxy_path(resource) }
+
+ it { is_expected.to eq(aws_proxy_admin_clusters_path(resource: resource)) }
+ end
+end
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index 6408b0bd748..ce437090d43 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe MergeRequestPresenter do
diff --git a/spec/presenters/project_clusterable_presenter_spec.rb b/spec/presenters/project_clusterable_presenter_spec.rb
index 6786a84243f..441c2a50fea 100644
--- a/spec/presenters/project_clusterable_presenter_spec.rb
+++ b/spec/presenters/project_clusterable_presenter_spec.rb
@@ -43,6 +43,12 @@ describe ProjectClusterablePresenter do
it { is_expected.to eq(new_project_cluster_path(project)) }
end
+ describe '#authorize_aws_role_path' do
+ subject { presenter.authorize_aws_role_path }
+
+ it { is_expected.to eq(authorize_aws_role_project_clusters_path(project)) }
+ end
+
describe '#create_user_clusters_path' do
subject { presenter.create_user_clusters_path }
diff --git a/spec/presenters/project_member_presenter_spec.rb b/spec/presenters/project_member_presenter_spec.rb
index 73ef113a1c5..743c89fc7c2 100644
--- a/spec/presenters/project_member_presenter_spec.rb
+++ b/spec/presenters/project_member_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe ProjectMemberPresenter do
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 2a00548c2c3..ce095d2225f 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe ProjectPresenter do
@@ -310,8 +312,8 @@ describe ProjectPresenter do
project.add_developer(user)
allow(project.repository).to receive(:license_blob).and_return(nil)
- expect(presenter.license_anchor_data).to have_attributes(is_link: true,
- label: a_string_including('Add license'),
+ expect(presenter.license_anchor_data).to have_attributes(is_link: false,
+ label: a_string_including('Add LICENSE'),
link: presenter.add_license_path)
end
end
@@ -320,7 +322,7 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project.repository).to receive(:license_blob).and_return(double(name: 'foo'))
- expect(presenter.license_anchor_data).to have_attributes(is_link: true,
+ expect(presenter.license_anchor_data).to have_attributes(is_link: false,
label: a_string_including(presenter.license_short_name),
link: presenter.license_path)
end
@@ -418,6 +420,7 @@ describe ProjectPresenter do
it 'orders the items correctly' do
allow(project.repository).to receive(:readme).and_return(double(name: 'readme'))
+ allow(project.repository).to receive(:license_blob).and_return(nil)
allow(project.repository).to receive(:changelog).and_return(nil)
allow(project.repository).to receive(:contribution_guide).and_return(double(name: 'foo'))
allow(presenter).to receive(:filename_path).and_return('fake/path')
@@ -431,25 +434,54 @@ describe ProjectPresenter do
end
end
- describe '#empty_repo_statistics_buttons' do
- let(:project) { create(:project, :repository) }
+ describe '#repo_statistics_buttons' do
let(:presenter) { described_class.new(project, current_user: user) }
-
subject(:empty_repo_statistics_buttons) { presenter.empty_repo_statistics_buttons }
before do
- project.add_developer(user)
allow(project).to receive(:auto_devops_enabled?).and_return(false)
end
- it 'orders the items correctly in an empty project' do
- expect(empty_repo_statistics_buttons.map(&:label)).to start_with(
- a_string_including('New'),
- a_string_including('README'),
- a_string_including('CHANGELOG'),
- a_string_including('CONTRIBUTING'),
- a_string_including('CI/CD')
- )
+ context 'empty repo' do
+ let(:project) { create(:project, :stubbed_repository)}
+
+ context 'for a guest user' do
+ it 'orders the items correctly' do
+ expect(empty_repo_statistics_buttons.map(&:label)).to start_with(
+ a_string_including('No license')
+ )
+ end
+ end
+
+ context 'for a developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'orders the items correctly' do
+ expect(empty_repo_statistics_buttons.map(&:label)).to start_with(
+ a_string_including('New'),
+ a_string_including('README'),
+ a_string_including('LICENSE'),
+ a_string_including('CHANGELOG'),
+ a_string_including('CONTRIBUTING'),
+ a_string_including('CI/CD')
+ )
+ end
+ end
+ end
+
+ context 'initialized repo' do
+ let(:project) { create(:project, :repository) }
+
+ it 'orders the items correctly' do
+ expect(empty_repo_statistics_buttons.map(&:label)).to start_with(
+ a_string_including('README'),
+ a_string_including('License'),
+ a_string_including('CHANGELOG'),
+ a_string_including('CONTRIBUTING')
+ )
+ end
end
end
end
diff --git a/spec/presenters/projects/settings/deploy_keys_presenter_spec.rb b/spec/presenters/projects/settings/deploy_keys_presenter_spec.rb
index b4bf39f3cdb..de58733c8ea 100644
--- a/spec/presenters/projects/settings/deploy_keys_presenter_spec.rb
+++ b/spec/presenters/projects/settings/deploy_keys_presenter_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Projects::Settings::DeployKeysPresenter do
diff --git a/spec/presenters/release_presenter_spec.rb b/spec/presenters/release_presenter_spec.rb
new file mode 100644
index 00000000000..4d9fa7a4d75
--- /dev/null
+++ b/spec/presenters/release_presenter_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ReleasePresenter do
+ include Gitlab::Routing.url_helpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let(:developer) { create(:user) }
+ let(:guest) { create(:user) }
+ let(:user) { developer }
+ let(:release) { create(:release, project: project) }
+ let(:presenter) { described_class.new(release, current_user: user) }
+
+ before do
+ project.add_developer(developer)
+ project.add_guest(guest)
+ end
+
+ describe '#commit_path' do
+ subject { presenter.commit_path }
+
+ it 'returns commit path' do
+ is_expected.to eq(project_commit_path(project, release.commit.id))
+ end
+
+ context 'when commit is not found' do
+ let(:release) { create(:release, project: project, sha: 'not-found') }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when user is guest' do
+ let(:user) { guest }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#tag_path' do
+ subject { presenter.tag_path }
+
+ it 'returns tag path' do
+ is_expected.to eq(project_tag_path(project, release.tag))
+ end
+
+ context 'when user is guest' do
+ let(:user) { guest }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#merge_requests_url' do
+ subject { presenter.merge_requests_url }
+
+ it 'returns merge requests url' do
+ is_expected.to match /#{project_merge_requests_url(project)}/
+ end
+
+ context 'when release_mr_issue_urls feature flag is disabled' do
+ before do
+ stub_feature_flags(release_mr_issue_urls: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#issues_url' do
+ subject { presenter.issues_url }
+
+ it 'returns merge requests url' do
+ is_expected.to match /#{project_issues_url(project)}/
+ end
+
+ context 'when release_mr_issue_urls feature flag is disabled' do
+ before do
+ stub_feature_flags(release_mr_issue_urls: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#edit_url' do
+ subject { presenter.edit_url }
+
+ it 'returns release edit url' do
+ is_expected.to match /#{edit_project_release_url(project, release)}/
+ end
+
+ context 'when release_edit_page feature flag is disabled' do
+ before do
+ stub_feature_flags(release_edit_page: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+end
diff --git a/spec/requests/api/access_requests_spec.rb b/spec/requests/api/access_requests_spec.rb
index 100f3d33c7b..3bfca00776f 100644
--- a/spec/requests/api/access_requests_spec.rb
+++ b/spec/requests/api/access_requests_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::AccessRequests do
diff --git a/spec/requests/api/applications_spec.rb b/spec/requests/api/applications_spec.rb
index 53fc3096751..438d5dbf018 100644
--- a/spec/requests/api/applications_spec.rb
+++ b/spec/requests/api/applications_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Applications, :api do
diff --git a/spec/requests/api/avatar_spec.rb b/spec/requests/api/avatar_spec.rb
index 9bc49bd5982..c8bc7f8a4a2 100644
--- a/spec/requests/api/avatar_spec.rb
+++ b/spec/requests/api/avatar_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Avatar do
diff --git a/spec/requests/api/award_emoji_spec.rb b/spec/requests/api/award_emoji_spec.rb
index 342fcfa1041..80040cddd4d 100644
--- a/spec/requests/api/award_emoji_spec.rb
+++ b/spec/requests/api/award_emoji_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::AwardEmoji do
diff --git a/spec/requests/api/badges_spec.rb b/spec/requests/api/badges_spec.rb
index 771a78a2d91..ea0a7d4c9b7 100644
--- a/spec/requests/api/badges_spec.rb
+++ b/spec/requests/api/badges_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Badges do
diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb
index 0b9c0c2ebe9..8a67e956165 100644
--- a/spec/requests/api/boards_spec.rb
+++ b/spec/requests/api/boards_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Boards do
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index f9c8b42afa8..675b06b057c 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Branches do
@@ -117,6 +119,25 @@ describe API::Branches do
it_behaves_like 'repository branches'
end
+
+ it 'does not submit N+1 DB queries', :request_store do
+ create(:protected_branch, name: 'master', project: project)
+
+ # Make sure no setup step query is recorded.
+ get api(route, current_user), params: { per_page: 100 }
+
+ control = ActiveRecord::QueryRecorder.new do
+ get api(route, current_user), params: { per_page: 100 }
+ end
+
+ new_branch_name = 'protected-branch'
+ CreateBranchService.new(project, current_user).execute(new_branch_name, 'master')
+ create(:protected_branch, name: new_branch_name, project: project)
+
+ expect do
+ get api(route, current_user), params: { per_page: 100 }
+ end.not_to exceed_query_limit(control)
+ end
end
context 'when authenticated', 'as a guest' do
@@ -602,7 +623,7 @@ describe API::Branches do
post api(route, user), params: { branch: 'new_design3', ref: 'foo' }
expect(response).to have_gitlab_http_status(400)
- expect(json_response['message']).to eq('Invalid reference name')
+ expect(json_response['message']).to eq('Invalid reference name: new_design3')
end
end
diff --git a/spec/requests/api/broadcast_messages_spec.rb b/spec/requests/api/broadcast_messages_spec.rb
index 0b48b79219c..541acb29857 100644
--- a/spec/requests/api/broadcast_messages_spec.rb
+++ b/spec/requests/api/broadcast_messages_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::BroadcastMessages do
diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb
index 6cb02ba2f6b..639b8e96343 100644
--- a/spec/requests/api/commit_statuses_spec.rb
+++ b/spec/requests/api/commit_statuses_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::CommitStatuses do
@@ -278,7 +280,7 @@ describe API::CommitStatuses do
}
end
- it 'update the correct pipeline' do
+ it 'update the correct pipeline', :sidekiq_might_not_need_inline do
subject
expect(first_pipeline.reload.status).to eq('created')
@@ -302,7 +304,7 @@ describe API::CommitStatuses do
expect(json_response['status']).to eq('success')
end
- it 'retries a commit status' do
+ it 'retries a commit status', :sidekiq_might_not_need_inline do
expect(CommitStatus.count).to eq 2
expect(CommitStatus.first).to be_retried
expect(CommitStatus.last.pipeline).to be_success
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 90ff1d12bf1..d8da1c001b0 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'mime/types'
@@ -369,7 +371,7 @@ describe API::Commits do
valid_c_params[:start_project] = public_project.id
end
- it 'adds a new commit to forked_project and returns a 201' do
+ it 'adds a new commit to forked_project and returns a 201', :sidekiq_might_not_need_inline do
expect_request_with_status(201) { post api(url, guest), params: valid_c_params }
.to change { last_commit_id(forked_project, valid_c_params[:branch]) }
.and not_change { last_commit_id(public_project, valid_c_params[:start_branch]) }
@@ -381,14 +383,14 @@ describe API::Commits do
valid_c_params[:start_project] = public_project.full_path
end
- it 'adds a new commit to forked_project and returns a 201' do
+ it 'adds a new commit to forked_project and returns a 201', :sidekiq_might_not_need_inline do
expect_request_with_status(201) { post api(url, guest), params: valid_c_params }
.to change { last_commit_id(forked_project, valid_c_params[:branch]) }
.and not_change { last_commit_id(public_project, valid_c_params[:start_branch]) }
end
end
- context 'when branch already exists' do
+ context 'when branch already exists', :sidekiq_might_not_need_inline do
before do
valid_c_params.delete(:start_branch)
valid_c_params[:branch] = 'master'
@@ -835,7 +837,7 @@ describe API::Commits do
}
end
- it 'allows pushing to the source branch of the merge request' do
+ it 'allows pushing to the source branch of the merge request', :sidekiq_might_not_need_inline do
post api(url, user), params: push_params('feature')
expect(response).to have_gitlab_http_status(:created)
@@ -1087,6 +1089,20 @@ describe API::Commits do
expect(json_response.first.keys).to include 'diff'
end
+ context 'when hard limits are lower than the number of files' do
+ before do
+ allow(Commit).to receive(:max_diff_options).and_return(max_files: 1)
+ end
+
+ it 'respects the limit' do
+ get api(route, current_user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response.size).to be <= 1
+ end
+ end
+
context 'when ref does not exist' do
let(:commit_id) { 'unknown' }
@@ -1360,6 +1376,12 @@ describe API::Commits do
it_behaves_like '400 response' do
let(:request) { post api(route, current_user), params: { branch: 'markdown' } }
end
+
+ it 'includes an error_code in the response' do
+ post api(route, current_user), params: { branch: 'markdown' }
+
+ expect(json_response['error_code']).to eq 'empty'
+ end
end
context 'when ref contains a dot' do
@@ -1417,7 +1439,7 @@ describe API::Commits do
let(:project_id) { forked_project.id }
- it 'allows access from a maintainer that to the source branch' do
+ it 'allows access from a maintainer that to the source branch', :sidekiq_might_not_need_inline do
post api(route, user), params: { branch: 'feature' }
expect(response).to have_gitlab_http_status(:created)
@@ -1519,6 +1541,19 @@ describe API::Commits do
let(:request) { post api(route, current_user) }
end
end
+
+ context 'when commit is already reverted in the target branch' do
+ it 'includes an error_code in the response' do
+ # First one actually reverts
+ post api(route, current_user), params: { branch: 'markdown' }
+
+ # Second one is redundant and should be empty
+ post api(route, current_user), params: { branch: 'markdown' }
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['error_code']).to eq 'empty'
+ end
+ end
end
context 'when authenticated', 'as a developer' do
diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb
index e0cc18abcca..4579ccfad80 100644
--- a/spec/requests/api/deploy_keys_spec.rb
+++ b/spec/requests/api/deploy_keys_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::DeployKeys do
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index ad7be531979..26849c0991d 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -12,9 +12,9 @@ describe API::Deployments do
describe 'GET /projects/:id/deployments' do
let(:project) { create(:project) }
- let!(:deployment_1) { create(:deployment, :success, project: project, iid: 11, ref: 'master', created_at: Time.now) }
- let!(:deployment_2) { create(:deployment, :success, project: project, iid: 12, ref: 'feature', created_at: 1.day.ago) }
- let!(:deployment_3) { create(:deployment, :success, project: project, iid: 8, ref: 'patch', created_at: 2.days.ago) }
+ let!(:deployment_1) { create(:deployment, :success, project: project, iid: 11, ref: 'master', created_at: Time.now, updated_at: Time.now) }
+ let!(:deployment_2) { create(:deployment, :success, project: project, iid: 12, ref: 'feature', created_at: 1.day.ago, updated_at: 2.hours.ago) }
+ let!(:deployment_3) { create(:deployment, :success, project: project, iid: 8, ref: 'patch', created_at: 2.days.ago, updated_at: 1.hour.ago) }
context 'as member of the project' do
it 'returns projects deployments sorted by id asc' do
@@ -57,6 +57,8 @@ describe API::Deployments do
'iid' | 'desc' | [:deployment_2, :deployment_1, :deployment_3]
'ref' | 'asc' | [:deployment_2, :deployment_1, :deployment_3]
'ref' | 'desc' | [:deployment_3, :deployment_1, :deployment_2]
+ 'updated_at' | 'asc' | [:deployment_2, :deployment_3, :deployment_1]
+ 'updated_at' | 'desc' | [:deployment_1, :deployment_3, :deployment_2]
end
with_them do
@@ -137,14 +139,42 @@ describe API::Deployments do
expect(response).to have_gitlab_http_status(500)
end
+
+ it 'links any merged merge requests to the deployment' do
+ mr = create(
+ :merge_request,
+ :merged,
+ target_project: project,
+ source_project: project,
+ target_branch: 'master',
+ source_branch: 'foo'
+ )
+
+ post(
+ api("/projects/#{project.id}/deployments", user),
+ params: {
+ environment: 'production',
+ sha: sha,
+ ref: 'master',
+ tag: false,
+ status: 'success'
+ }
+ )
+
+ deploy = project.deployments.last
+
+ expect(deploy.merge_requests).to eq([mr])
+ end
end
context 'as a developer' do
- it 'creates a new deployment' do
- developer = create(:user)
+ let(:developer) { create(:user) }
+ before do
project.add_developer(developer)
+ end
+ it 'creates a new deployment' do
post(
api("/projects/#{project.id}/deployments", developer),
params: {
@@ -161,6 +191,32 @@ describe API::Deployments do
expect(json_response['sha']).to eq(sha)
expect(json_response['ref']).to eq('master')
end
+
+ it 'links any merged merge requests to the deployment' do
+ mr = create(
+ :merge_request,
+ :merged,
+ target_project: project,
+ source_project: project,
+ target_branch: 'master',
+ source_branch: 'foo'
+ )
+
+ post(
+ api("/projects/#{project.id}/deployments", developer),
+ params: {
+ environment: 'production',
+ sha: sha,
+ ref: 'master',
+ tag: false,
+ status: 'success'
+ }
+ )
+
+ deploy = project.deployments.last
+
+ expect(deploy.merge_requests).to eq([mr])
+ end
end
context 'as non member' do
@@ -182,7 +238,7 @@ describe API::Deployments do
end
describe 'PUT /projects/:id/deployments/:deployment_id' do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:build) { create(:ci_build, :failed, project: project) }
let(:environment) { create(:environment, project: project) }
let(:deploy) do
@@ -191,7 +247,8 @@ describe API::Deployments do
:failed,
project: project,
environment: environment,
- deployable: nil
+ deployable: nil,
+ sha: project.commit.sha
)
end
@@ -216,6 +273,26 @@ describe API::Deployments do
expect(response).to have_gitlab_http_status(200)
expect(json_response['status']).to eq('success')
end
+
+ it 'links merge requests when the deployment status changes to success', :sidekiq_inline do
+ mr = create(
+ :merge_request,
+ :merged,
+ target_project: project,
+ source_project: project,
+ target_branch: 'master',
+ source_branch: 'foo'
+ )
+
+ put(
+ api("/projects/#{project.id}/deployments/#{deploy.id}", user),
+ params: { status: 'success' }
+ )
+
+ deploy = project.deployments.last
+
+ expect(deploy.merge_requests).to eq([mr])
+ end
end
context 'as a developer' do
diff --git a/spec/requests/api/discussions_spec.rb b/spec/requests/api/discussions_spec.rb
index 0420201efe3..68f7d407b54 100644
--- a/spec/requests/api/discussions_spec.rb
+++ b/spec/requests/api/discussions_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Discussions do
diff --git a/spec/requests/api/doorkeeper_access_spec.rb b/spec/requests/api/doorkeeper_access_spec.rb
index cfee3f6c0f8..2a34e623a7e 100644
--- a/spec/requests/api/doorkeeper_access_spec.rb
+++ b/spec/requests/api/doorkeeper_access_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'doorkeeper access' do
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index 745f3c55ac8..aa273e97209 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Environments do
diff --git a/spec/requests/api/events_spec.rb b/spec/requests/api/events_spec.rb
index 992fd5e9c66..9f8d254a00c 100644
--- a/spec/requests/api/events_spec.rb
+++ b/spec/requests/api/events_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Events do
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index 57a57e69a00..dfd14f89dbf 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Features do
@@ -118,14 +120,13 @@ describe API::Features do
post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username, feature_group: 'perf_team' }
expect(response).to have_gitlab_http_status(201)
- expect(json_response).to eq(
- 'name' => 'my_feature',
- 'state' => 'conditional',
- 'gates' => [
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'groups', 'value' => ['perf_team'] },
- { 'key' => 'actors', 'value' => ["User:#{user.id}"] }
- ])
+ expect(json_response['name']).to eq('my_feature')
+ expect(json_response['state']).to eq('conditional')
+ expect(json_response['gates']).to contain_exactly(
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'groups', 'value' => ['perf_team'] },
+ { 'key' => 'actors', 'value' => ["User:#{user.id}"] }
+ )
end
end
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index 21b67357543..ec18156f49f 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Files do
diff --git a/spec/requests/api/graphql/current_user/todos_query_spec.rb b/spec/requests/api/graphql/current_user/todos_query_spec.rb
new file mode 100644
index 00000000000..82deba0d92c
--- /dev/null
+++ b/spec/requests/api/graphql/current_user/todos_query_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Query current user todos' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:commit_todo) { create(:on_commit_todo, user: current_user, project: create(:project, :repository)) }
+ let_it_be(:issue_todo) { create(:todo, user: current_user, target: create(:issue)) }
+ let_it_be(:merge_request_todo) { create(:todo, user: current_user, target: create(:merge_request)) }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ #{all_graphql_fields_for('todos'.classify)}
+ }
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for('currentUser', {}, query_graphql_field('todos', {}, fields))
+ end
+
+ subject { graphql_data.dig('currentUser', 'todos', 'nodes') }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'contains the expected ids' do
+ is_expected.to include(
+ a_hash_including('id' => commit_todo.to_global_id.to_s),
+ a_hash_including('id' => issue_todo.to_global_id.to_s),
+ a_hash_including('id' => merge_request_todo.to_global_id.to_s)
+ )
+ end
+
+ it 'returns Todos for all target types' do
+ is_expected.to include(
+ a_hash_including('targetType' => 'COMMIT'),
+ a_hash_including('targetType' => 'ISSUE'),
+ a_hash_including('targetType' => 'MERGEREQUEST')
+ )
+ end
+end
diff --git a/spec/requests/api/graphql/current_user_query_spec.rb b/spec/requests/api/graphql/current_user_query_spec.rb
new file mode 100644
index 00000000000..9db638ea59e
--- /dev/null
+++ b/spec/requests/api/graphql/current_user_query_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'getting current user information' do
+ include GraphqlHelpers
+
+ let(:query) do
+ graphql_query_for('currentUser', {}, 'name')
+ end
+
+ subject { graphql_data['currentUser'] }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ context 'when there is a current_user' do
+ set(:current_user) { create(:user) }
+
+ it_behaves_like 'a working graphql query'
+
+ it { is_expected.to include('name' => current_user.name) }
+ end
+
+ context 'when there is no current_user' do
+ let(:current_user) { nil }
+
+ it_behaves_like 'a working graphql query'
+
+ it { is_expected.to be_nil }
+ end
+end
diff --git a/spec/requests/api/graphql/gitlab_schema_spec.rb b/spec/requests/api/graphql/gitlab_schema_spec.rb
index 1e799a0a42a..2aeb75a10b4 100644
--- a/spec/requests/api/graphql/gitlab_schema_spec.rb
+++ b/spec/requests/api/graphql/gitlab_schema_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'GitlabSchema configurations' do
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
new file mode 100644
index 00000000000..8f908b7bf88
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Setting assignees of a merge request' do
+ include GraphqlHelpers
+
+ let(:current_user) { create(:user) }
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+ let(:assignee) { create(:user) }
+ let(:assignee2) { create(:user) }
+ let(:input) { { assignee_usernames: [assignee.username] } }
+ let(:expected_result) do
+ [{ 'username' => assignee.username }]
+ end
+
+ let(:mutation) do
+ variables = {
+ project_path: project.full_path,
+ iid: merge_request.iid.to_s
+ }
+ graphql_mutation(:merge_request_set_assignees, variables.merge(input),
+ <<-QL.strip_heredoc
+ clientMutationId
+ errors
+ mergeRequest {
+ id
+ assignees {
+ nodes {
+ username
+ }
+ }
+ }
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:merge_request_set_assignees)
+ end
+
+ def mutation_assignee_nodes
+ mutation_response['mergeRequest']['assignees']['nodes']
+ end
+
+ before do
+ project.add_developer(current_user)
+ project.add_developer(assignee)
+ project.add_developer(assignee2)
+ end
+
+ it 'returns an error if the user is not allowed to update the merge request' do
+ post_graphql_mutation(mutation, current_user: create(:user))
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ it 'does not allow members without the right permission to add assignees' do
+ user = create(:user)
+ project.add_guest(user)
+
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ context 'with assignees already assigned' do
+ before do
+ merge_request.assignees = [assignee2]
+ merge_request.save!
+ end
+
+ it 'replaces the assignee' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_assignee_nodes).to match_array(expected_result)
+ end
+ end
+
+ context 'when passing an empty list of assignees' do
+ let(:input) { { assignee_usernames: [] } }
+
+ before do
+ merge_request.assignees = [assignee2]
+ merge_request.save!
+ end
+
+ it 'removes assignee' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_assignee_nodes).to eq([])
+ end
+ end
+
+ context 'when passing append as true' do
+ let(:input) { { assignee_usernames: [assignee2.username], operation_mode: Types::MutationOperationModeEnum.enum[:append] } }
+
+ before do
+ # In CE, APPEND is a NOOP as you can't have multiple assignees
+ # We test multiple assignment in EE specs
+ stub_licensed_features(multiple_merge_request_assignees: false)
+
+ merge_request.assignees = [assignee]
+ merge_request.save!
+ end
+
+ it 'does not replace the assignee in CE' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_assignee_nodes).to match_array(expected_result)
+ end
+ end
+
+ context 'when passing remove as true' do
+ let(:input) { { assignee_usernames: [assignee.username], operation_mode: Types::MutationOperationModeEnum.enum[:remove] } }
+ let(:expected_result) { [] }
+
+ before do
+ merge_request.assignees = [assignee]
+ merge_request.save!
+ end
+
+ it 'removes the users in the list, while adding none' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_assignee_nodes).to match_array(expected_result)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_labels_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_labels_spec.rb
new file mode 100644
index 00000000000..2112ff0dc74
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_labels_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Setting labels of a merge request' do
+ include GraphqlHelpers
+
+ let(:current_user) { create(:user) }
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+ let(:label) { create(:label, project: project) }
+ let(:label2) { create(:label, project: project) }
+ let(:input) { { label_ids: [GitlabSchema.id_from_object(label).to_s] } }
+
+ let(:mutation) do
+ variables = {
+ project_path: project.full_path,
+ iid: merge_request.iid.to_s
+ }
+ graphql_mutation(:merge_request_set_labels, variables.merge(input),
+ <<-QL.strip_heredoc
+ clientMutationId
+ errors
+ mergeRequest {
+ id
+ labels {
+ nodes {
+ id
+ }
+ }
+ }
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:merge_request_set_labels)
+ end
+
+ def mutation_label_nodes
+ mutation_response['mergeRequest']['labels']['nodes']
+ end
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'returns an error if the user is not allowed to update the merge request' do
+ post_graphql_mutation(mutation, current_user: create(:user))
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ it 'sets the merge request labels, removing existing ones' do
+ merge_request.update(labels: [label2])
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_label_nodes.count).to eq(1)
+ expect(mutation_label_nodes[0]['id']).to eq(label.to_global_id.to_s)
+ end
+
+ context 'when passing label_ids empty array as input' do
+ let(:input) { { label_ids: [] } }
+
+ it 'removes the merge request labels' do
+ merge_request.update!(labels: [label])
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_label_nodes.count).to eq(0)
+ end
+ end
+
+ context 'when passing operation_mode as APPEND' do
+ let(:input) { { operation_mode: Types::MutationOperationModeEnum.enum[:append], label_ids: [GitlabSchema.id_from_object(label).to_s] } }
+
+ before do
+ merge_request.update!(labels: [label2])
+ end
+
+ it 'sets the labels, without removing others' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_label_nodes.count).to eq(2)
+ expect(mutation_label_nodes).to contain_exactly({ 'id' => label.to_global_id.to_s }, { 'id' => label2.to_global_id.to_s })
+ end
+ end
+
+ context 'when passing operation_mode as REMOVE' do
+ let(:input) { { operation_mode: Types::MutationOperationModeEnum.enum[:remove], label_ids: [GitlabSchema.id_from_object(label).to_s] } }
+
+ before do
+ merge_request.update!(labels: [label, label2])
+ end
+
+ it 'removes the labels, without removing others' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_label_nodes.count).to eq(1)
+ expect(mutation_label_nodes[0]['id']).to eq(label2.to_global_id.to_s)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_locked_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_locked_spec.rb
new file mode 100644
index 00000000000..c45da613591
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_locked_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Setting locked status of a merge request' do
+ include GraphqlHelpers
+
+ let(:current_user) { create(:user) }
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+ let(:input) { { locked: true } }
+
+ let(:mutation) do
+ variables = {
+ project_path: project.full_path,
+ iid: merge_request.iid.to_s
+ }
+ graphql_mutation(:merge_request_set_locked, variables.merge(input),
+ <<-QL.strip_heredoc
+ clientMutationId
+ errors
+ mergeRequest {
+ id
+ discussionLocked
+ }
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:merge_request_set_locked)['mergeRequest']['discussionLocked']
+ end
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'returns an error if the user is not allowed to update the merge request' do
+ post_graphql_mutation(mutation, current_user: create(:user))
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ it 'marks the merge request discussion as locked' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response).to eq(true)
+ end
+
+ it 'does not do anything if the merge request was already locked' do
+ merge_request.update!(discussion_locked: true)
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response).to eq(true)
+ end
+
+ context 'when passing locked false as input' do
+ let(:input) { { locked: false } }
+
+ it 'does not do anything if the merge request was not marked locked' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response).to eq(false)
+ end
+
+ it 'unmarks the merge request as locked' do
+ merge_request.update!(discussion_locked: true)
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response).to eq(false)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_milestone_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_milestone_spec.rb
new file mode 100644
index 00000000000..bd558edf9c5
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_milestone_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Setting milestone of a merge request' do
+ include GraphqlHelpers
+
+ let(:current_user) { create(:user) }
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+ let(:milestone) { create(:milestone, project: project) }
+ let(:input) { { milestone_id: GitlabSchema.id_from_object(milestone).to_s } }
+
+ let(:mutation) do
+ variables = {
+ project_path: project.full_path,
+ iid: merge_request.iid.to_s
+ }
+ graphql_mutation(:merge_request_set_milestone, variables.merge(input),
+ <<-QL.strip_heredoc
+ clientMutationId
+ errors
+ mergeRequest {
+ id
+ milestone {
+ id
+ }
+ }
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:merge_request_set_milestone)
+ end
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'returns an error if the user is not allowed to update the merge request' do
+ post_graphql_mutation(mutation, current_user: create(:user))
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ it 'sets the merge request milestone' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['mergeRequest']['milestone']['id']).to eq(milestone.to_global_id.to_s)
+ end
+
+ context 'when passing milestone_id nil as input' do
+ let(:input) { { milestone_id: nil } }
+
+ it 'removes the merge request milestone' do
+ merge_request.update!(milestone: milestone)
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['mergeRequest']['milestone']).to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
new file mode 100644
index 00000000000..975735bf246
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Setting subscribed status of a merge request' do
+ include GraphqlHelpers
+
+ let(:current_user) { create(:user) }
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+ let(:input) { { subscribed_state: true } }
+
+ let(:mutation) do
+ variables = {
+ project_path: project.full_path,
+ iid: merge_request.iid.to_s
+ }
+ graphql_mutation(:merge_request_set_subscription, variables.merge(input),
+ <<-QL.strip_heredoc
+ clientMutationId
+ errors
+ mergeRequest {
+ id
+ subscribed
+ }
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:merge_request_set_subscription)['mergeRequest']['subscribed']
+ end
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'returns an error if the user is not allowed to update the merge request' do
+ post_graphql_mutation(mutation, current_user: create(:user))
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ it 'marks the merge request as subscribed' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response).to eq(true)
+ end
+
+ context 'when passing subscribe false as input' do
+ let(:input) { { subscribed_state: false } }
+
+ it 'unmarks the merge request as subscribed' do
+ merge_request.subscribe(current_user, project)
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response).to eq(false)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_wip_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_wip_spec.rb
index bbc477ba485..4492c51dbd7 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_wip_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_wip_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'Setting WIP status of a merge request' do
diff --git a/spec/requests/api/graphql/mutations/todos/mark_done_spec.rb b/spec/requests/api/graphql/mutations/todos/mark_done_spec.rb
new file mode 100644
index 00000000000..fabbb3aeb49
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/todos/mark_done_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Marking todos done' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:author) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+
+ let_it_be(:todo1) { create(:todo, user: current_user, author: author, state: :pending) }
+ let_it_be(:todo2) { create(:todo, user: current_user, author: author, state: :done) }
+
+ let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :pending) }
+
+ let(:input) { { id: todo1.to_global_id.to_s } }
+
+ let(:mutation) do
+ graphql_mutation(:todo_mark_done, input,
+ <<-QL.strip_heredoc
+ clientMutationId
+ errors
+ todo {
+ id
+ state
+ }
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:todo_mark_done)
+ end
+
+ it 'marks a single todo as done' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(todo1.reload.state).to eq('done')
+ expect(todo2.reload.state).to eq('done')
+ expect(other_user_todo.reload.state).to eq('pending')
+
+ todo = mutation_response['todo']
+ expect(todo['id']).to eq(todo1.to_global_id.to_s)
+ expect(todo['state']).to eq('done')
+ end
+
+ context 'when todo is already marked done' do
+ let(:input) { { id: todo2.to_global_id.to_s } }
+
+ it 'has the expected response' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(todo1.reload.state).to eq('pending')
+ expect(todo2.reload.state).to eq('done')
+ expect(other_user_todo.reload.state).to eq('pending')
+
+ todo = mutation_response['todo']
+ expect(todo['id']).to eq(todo2.to_global_id.to_s)
+ expect(todo['state']).to eq('done')
+ end
+ end
+
+ context 'when todo does not belong to requesting user' do
+ let(:input) { { id: other_user_todo.to_global_id.to_s } }
+ let(:access_error) { 'The resource that you are attempting to access does not exist or you don\'t have permission to perform this action' }
+
+ it 'contains the expected error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ errors = json_response['errors']
+ expect(errors).not_to be_blank
+ expect(errors.first['message']).to eq(access_error)
+
+ expect(todo1.reload.state).to eq('pending')
+ expect(todo2.reload.state).to eq('done')
+ expect(other_user_todo.reload.state).to eq('pending')
+ end
+ end
+
+ context 'when using an invalid gid' do
+ let(:input) { { id: 'invalid_gid' } }
+ let(:invalid_gid_error) { 'invalid_gid is not a valid GitLab id.' }
+
+ it 'contains the expected error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ errors = json_response['errors']
+ expect(errors).not_to be_blank
+ expect(errors.first['message']).to eq(invalid_gid_error)
+
+ expect(todo1.reload.state).to eq('pending')
+ expect(todo2.reload.state).to eq('done')
+ expect(other_user_todo.reload.state).to eq('pending')
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index 4f9f916f22e..4ce7a3912a3 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'getting an issue list for a project' do
@@ -62,7 +64,7 @@ describe 'getting an issue list for a project' do
end
end
- it "is expected to check permissions on the first issue only" do
+ it 'is expected to check permissions on the first issue only' do
allow(Ability).to receive(:allowed?).and_call_original
# Newest first, we only want to see the newest checked
expect(Ability).not_to receive(:allowed?).with(current_user, :read_issue, issues.first)
@@ -114,4 +116,141 @@ describe 'getting an issue list for a project' do
end
end
end
+
+ describe 'sorting and pagination' do
+ let(:start_cursor) { graphql_data['project']['issues']['pageInfo']['startCursor'] }
+ let(:end_cursor) { graphql_data['project']['issues']['pageInfo']['endCursor'] }
+
+ context 'when sorting by due date' do
+ let(:sort_project) { create(:project, :public) }
+
+ let!(:due_issue1) { create(:issue, project: sort_project, due_date: 3.days.from_now) }
+ let!(:due_issue2) { create(:issue, project: sort_project, due_date: nil) }
+ let!(:due_issue3) { create(:issue, project: sort_project, due_date: 2.days.ago) }
+ let!(:due_issue4) { create(:issue, project: sort_project, due_date: nil) }
+ let!(:due_issue5) { create(:issue, project: sort_project, due_date: 1.day.ago) }
+
+ let(:params) { 'sort: DUE_DATE_ASC' }
+
+ def query(issue_params = params)
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => sort_project.full_path },
+ <<~ISSUES
+ issues(#{issue_params}) {
+ pageInfo {
+ endCursor
+ }
+ edges {
+ node {
+ iid
+ dueDate
+ }
+ }
+ }
+ ISSUES
+ )
+ end
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ context 'when ascending' do
+ it 'sorts issues' do
+ expect(grab_iids).to eq [due_issue3.iid, due_issue5.iid, due_issue1.iid, due_issue4.iid, due_issue2.iid]
+ end
+
+ context 'when paginating' do
+ let(:params) { 'sort: DUE_DATE_ASC, first: 2' }
+
+ it 'sorts issues' do
+ expect(grab_iids).to eq [due_issue3.iid, due_issue5.iid]
+
+ cursored_query = query("sort: DUE_DATE_ASC, after: \"#{end_cursor}\"")
+ post_graphql(cursored_query, current_user: current_user)
+ response_data = JSON.parse(response.body)['data']['project']['issues']['edges']
+
+ expect(grab_iids(response_data)).to eq [due_issue1.iid, due_issue4.iid, due_issue2.iid]
+ end
+ end
+ end
+
+ context 'when descending' do
+ let(:params) { 'sort: DUE_DATE_DESC' }
+
+ it 'sorts issues' do
+ expect(grab_iids).to eq [due_issue1.iid, due_issue5.iid, due_issue3.iid, due_issue4.iid, due_issue2.iid]
+ end
+
+ context 'when paginating' do
+ let(:params) { 'sort: DUE_DATE_DESC, first: 2' }
+
+ it 'sorts issues' do
+ expect(grab_iids).to eq [due_issue1.iid, due_issue5.iid]
+
+ cursored_query = query("sort: DUE_DATE_DESC, after: \"#{end_cursor}\"")
+ post_graphql(cursored_query, current_user: current_user)
+ response_data = JSON.parse(response.body)['data']['project']['issues']['edges']
+
+ expect(grab_iids(response_data)).to eq [due_issue3.iid, due_issue4.iid, due_issue2.iid]
+ end
+ end
+ end
+ end
+
+ context 'when sorting by relative position' do
+ let(:sort_project) { create(:project, :public) }
+
+ let!(:relative_issue1) { create(:issue, project: sort_project, relative_position: 2000) }
+ let!(:relative_issue2) { create(:issue, project: sort_project, relative_position: nil) }
+ let!(:relative_issue3) { create(:issue, project: sort_project, relative_position: 1000) }
+ let!(:relative_issue4) { create(:issue, project: sort_project, relative_position: nil) }
+ let!(:relative_issue5) { create(:issue, project: sort_project, relative_position: 500) }
+
+ let(:params) { 'sort: RELATIVE_POSITION_ASC' }
+
+ def query(issue_params = params)
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => sort_project.full_path },
+ "issues(#{issue_params}) { pageInfo { endCursor} edges { node { iid dueDate } } }"
+ )
+ end
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ context 'when ascending' do
+ it 'sorts issues' do
+ expect(grab_iids).to eq [relative_issue5.iid, relative_issue3.iid, relative_issue1.iid, relative_issue4.iid, relative_issue2.iid]
+ end
+
+ context 'when paginating' do
+ let(:params) { 'sort: RELATIVE_POSITION_ASC, first: 2' }
+
+ it 'sorts issues' do
+ expect(grab_iids).to eq [relative_issue5.iid, relative_issue3.iid]
+
+ cursored_query = query("sort: RELATIVE_POSITION_ASC, after: \"#{end_cursor}\"")
+ post_graphql(cursored_query, current_user: current_user)
+ response_data = JSON.parse(response.body)['data']['project']['issues']['edges']
+
+ expect(grab_iids(response_data)).to eq [relative_issue1.iid, relative_issue4.iid, relative_issue2.iid]
+ end
+ end
+ end
+ end
+ end
+
+ def grab_iids(data = issues_data)
+ data.map do |issue|
+ issue.dig('node', 'iid').to_i
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index 74820d39102..70c21666799 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'getting merge request information nested in a project' do
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index 0727ada4691..fbb22958d51 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'getting project information' do
diff --git a/spec/requests/api/group_boards_spec.rb b/spec/requests/api/group_boards_spec.rb
index b400a7f55ef..232ec9aca32 100644
--- a/spec/requests/api/group_boards_spec.rb
+++ b/spec/requests/api/group_boards_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::GroupBoards do
diff --git a/spec/requests/api/group_clusters_spec.rb b/spec/requests/api/group_clusters_spec.rb
index 46e3dd650cc..97465647a87 100644
--- a/spec/requests/api/group_clusters_spec.rb
+++ b/spec/requests/api/group_clusters_spec.rb
@@ -286,12 +286,15 @@ describe API::GroupClusters do
let(:update_params) do
{
domain: domain,
- platform_kubernetes_attributes: platform_kubernetes_attributes
+ platform_kubernetes_attributes: platform_kubernetes_attributes,
+ management_project_id: management_project_id
}
end
let(:domain) { 'new-domain.com' }
let(:platform_kubernetes_attributes) { {} }
+ let(:management_project) { create(:project, group: group) }
+ let(:management_project_id) { management_project.id }
let(:cluster) do
create(:cluster, :group, :provided_by_gcp,
@@ -308,6 +311,8 @@ describe API::GroupClusters do
context 'authorized user' do
before do
+ management_project.add_maintainer(current_user)
+
put api("/groups/#{group.id}/clusters/#{cluster.id}", current_user), params: update_params
cluster.reload
@@ -320,6 +325,7 @@ describe API::GroupClusters do
it 'updates cluster attributes' do
expect(cluster.domain).to eq('new-domain.com')
+ expect(cluster.management_project).to eq(management_project)
end
end
@@ -332,6 +338,7 @@ describe API::GroupClusters do
it 'does not update cluster attributes' do
expect(cluster.domain).to eq('old-domain.com')
+ expect(cluster.management_project).to be_nil
end
it 'returns validation errors' do
@@ -339,6 +346,18 @@ describe API::GroupClusters do
end
end
+ context 'current user does not have access to management_project_id' do
+ let(:management_project_id) { create(:project).id }
+
+ it 'responds with 400' do
+ expect(response).to have_gitlab_http_status(400)
+ end
+
+ it 'returns validation errors' do
+ expect(json_response['message']['management_project_id'].first).to match('don\'t have permission')
+ end
+ end
+
context 'with a GCP cluster' do
context 'when user tries to change GCP specific fields' do
let(:platform_kubernetes_attributes) do
diff --git a/spec/requests/api/group_container_repositories_spec.rb b/spec/requests/api/group_container_repositories_spec.rb
index 0a41e455d01..785006253d8 100644
--- a/spec/requests/api/group_container_repositories_spec.rb
+++ b/spec/requests/api/group_container_repositories_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
describe API::GroupContainerRepositories do
- set(:group) { create(:group, :private) }
- set(:project) { create(:project, :private, group: group) }
- let(:reporter) { create(:user) }
- let(:guest) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, :private, group: group) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
let(:root_repository) { create(:container_repository, :root, project: project) }
let(:test_repository) { create(:container_repository, project: project) }
@@ -44,6 +44,8 @@ describe API::GroupContainerRepositories do
let(:object) { group }
end
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'list_repositories'
+
context 'with invalid group id' do
let(:url) { '/groups/123412341234/registry/repositories' }
diff --git a/spec/requests/api/group_export_spec.rb b/spec/requests/api/group_export_spec.rb
new file mode 100644
index 00000000000..ac4853e5388
--- /dev/null
+++ b/spec/requests/api/group_export_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::GroupExport do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ let(:path) { "/groups/#{group.id}/export" }
+ let(:download_path) { "/groups/#{group.id}/export/download" }
+
+ let(:export_path) { "#{Dir.tmpdir}/group_export_spec" }
+
+ before do
+ allow_next_instance_of(Gitlab::ImportExport) do |import_export|
+ expect(import_export).to receive(:storage_path).and_return(export_path)
+ end
+ end
+
+ after do
+ FileUtils.rm_rf(export_path, secure: true)
+ end
+
+ describe 'GET /groups/:group_id/export/download' do
+ let(:upload) { ImportExportUpload.new(group: group) }
+
+ before do
+ stub_uploads_object_storage(ImportExportUploader)
+
+ group.add_owner(user)
+ end
+
+ context 'when export file exists' do
+ before do
+ upload.export_file = fixture_file_upload('spec/fixtures/group_export.tar.gz', "application/x-tar.gz")
+ upload.save!
+ end
+
+ it 'downloads exported group archive' do
+ get api(download_path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ context 'when export_file.file does not exist' do
+ before do
+ expect_next_instance_of(ImportExportUploader) do |uploader|
+ expect(uploader).to receive(:file).and_return(nil)
+ end
+ end
+
+ it 'returns 404' do
+ get api(download_path, user)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context 'when export file does not exist' do
+ it 'returns 404' do
+ get api(download_path, user)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ describe 'POST /groups/:group_id/export' do
+ context 'when user is a group owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'accepts download' do
+ post api(path, user)
+
+ expect(response).to have_gitlab_http_status(202)
+ end
+ end
+
+ context 'when user is not a group owner' do
+ before do
+ group.add_developer(user)
+ end
+
+ it 'forbids the request' do
+ post api(path, user)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/group_milestones_spec.rb b/spec/requests/api/group_milestones_spec.rb
index 6980eb7f55d..3e9b6246434 100644
--- a/spec/requests/api/group_milestones_spec.rb
+++ b/spec/requests/api/group_milestones_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::GroupMilestones do
diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb
index d50bae3dc47..abdc3a40360 100644
--- a/spec/requests/api/group_variables_spec.rb
+++ b/spec/requests/api/group_variables_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::GroupVariables do
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 902a5ec2a86..cb97398805a 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Groups do
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index a1a007811fe..bbfe40041a1 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'raven/transports/dummy'
require_relative '../../../config/initializers/sentry'
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index 68df02d4d8d..3ff7102479c 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ImportGithub do
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 01a2e33c0d9..fcff2cde730 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Internal::Base do
@@ -316,6 +318,7 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
+ expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-get-all-lfs-pointers-go' => 'true', 'gitaly-feature-inforef-uploadpack-cache' => 'true')
expect(user.reload.last_activity_on).to eql(Date.today)
end
end
@@ -335,6 +338,7 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
+ expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-get-all-lfs-pointers-go' => 'true', 'gitaly-feature-inforef-uploadpack-cache' => 'true')
expect(user.reload.last_activity_on).to be_nil
end
end
@@ -407,7 +411,6 @@ describe API::Internal::Base do
context "custom action" do
let(:access_checker) { double(Gitlab::GitAccess) }
- let(:message) { 'CustomActionError message' }
let(:payload) do
{
'action' => 'geo_proxy_to_primary',
@@ -418,8 +421,8 @@ describe API::Internal::Base do
}
}
end
-
- let(:custom_action_result) { Gitlab::GitAccessResult::CustomAction.new(payload, message) }
+ let(:console_messages) { ['informational message'] }
+ let(:custom_action_result) { Gitlab::GitAccessResult::CustomAction.new(payload, console_messages) }
before do
project.add_guest(user)
@@ -446,8 +449,8 @@ describe API::Internal::Base do
expect(response).to have_gitlab_http_status(300)
expect(json_response['status']).to be_truthy
- expect(json_response['message']).to eql(message)
expect(json_response['payload']).to eql(payload)
+ expect(json_response['gl_console_messages']).to eql(console_messages)
expect(user.reload.last_activity_on).to be_nil
end
end
@@ -577,6 +580,7 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
+ expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-get-all-lfs-pointers-go' => 'true', 'gitaly-feature-inforef-uploadpack-cache' => 'true')
end
end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 89ee6f896f9..020e7659a4c 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Jobs do
@@ -595,7 +597,7 @@ describe API::Jobs do
context 'find proper job' do
shared_examples 'a valid file' do
- context 'when artifacts are stored locally' do
+ context 'when artifacts are stored locally', :sidekiq_might_not_need_inline do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' =>
@@ -674,7 +676,7 @@ describe API::Jobs do
let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC }
let(:public_builds) { true }
- it 'allows to access artifacts' do
+ it 'allows to access artifacts', :sidekiq_might_not_need_inline do
expect(response).to have_gitlab_http_status(200)
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
@@ -711,7 +713,7 @@ describe API::Jobs do
let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
let(:public_builds) { true }
- it 'returns a specific artifact file for a valid path' do
+ it 'returns a specific artifact file for a valid path', :sidekiq_might_not_need_inline do
expect(Gitlab::Workhorse)
.to receive(:send_artifacts_entry)
.and_call_original
@@ -732,7 +734,7 @@ describe API::Jobs do
sha: project.commit('improve/awesome').sha)
end
- it 'returns a specific artifact file for a valid path' do
+ it 'returns a specific artifact file for a valid path', :sidekiq_might_not_need_inline do
get_artifact_file(artifact, 'improve/awesome')
expect(response).to have_gitlab_http_status(200)
diff --git a/spec/requests/api/keys_spec.rb b/spec/requests/api/keys_spec.rb
index f37d84fddef..6802a0cfdab 100644
--- a/spec/requests/api/keys_spec.rb
+++ b/spec/requests/api/keys_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Keys do
diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb
index 7089da3d351..d027738c8db 100644
--- a/spec/requests/api/labels_spec.rb
+++ b/spec/requests/api/labels_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Labels do
diff --git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb
index f52cdf1c459..46d23bd16b9 100644
--- a/spec/requests/api/lint_spec.rb
+++ b/spec/requests/api/lint_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Lint do
diff --git a/spec/requests/api/markdown_spec.rb b/spec/requests/api/markdown_spec.rb
index 0cf5c5677b9..99263f2fc1e 100644
--- a/spec/requests/api/markdown_spec.rb
+++ b/spec/requests/api/markdown_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require "spec_helper"
describe API::Markdown do
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index eb55d747179..f2942020e16 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Members do
@@ -24,7 +26,7 @@ describe API::Members do
shared_examples 'GET /:source_type/:id/members/(all)' do |source_type, all|
let(:members_url) do
- "/#{source_type.pluralize}/#{source.id}/members".tap do |url|
+ (+"/#{source_type.pluralize}/#{source.id}/members").tap do |url|
url << "/all" if all
end
end
@@ -149,9 +151,15 @@ describe API::Members do
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.map { |u| u['id'] }).to eq [developer.id, maintainer.id, nested_user.id, project_user.id, linked_group_user.id]
- expect(json_response.map { |u| u['access_level'] }).to eq [Gitlab::Access::DEVELOPER, Gitlab::Access::OWNER, Gitlab::Access::DEVELOPER,
- Gitlab::Access::DEVELOPER, Gitlab::Access::DEVELOPER]
+
+ expected_users_and_access_levels = [
+ [developer.id, Gitlab::Access::DEVELOPER],
+ [maintainer.id, Gitlab::Access::OWNER],
+ [nested_user.id, Gitlab::Access::DEVELOPER],
+ [project_user.id, Gitlab::Access::DEVELOPER],
+ [linked_group_user.id, Gitlab::Access::DEVELOPER]
+ ]
+ expect(json_response.map { |u| [u['id'], u['access_level']] }).to match_array(expected_users_and_access_levels)
end
it 'finds all group members including inherited members' do
diff --git a/spec/requests/api/merge_request_diffs_spec.rb b/spec/requests/api/merge_request_diffs_spec.rb
index 8a67d98fc4c..9de76c2fe50 100644
--- a/spec/requests/api/merge_request_diffs_spec.rb
+++ b/spec/requests/api/merge_request_diffs_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require "spec_helper"
describe API::MergeRequestDiffs, 'MergeRequestDiffs' do
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 05160a33e61..c96c80b6998 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require "spec_helper"
describe API::MergeRequests do
@@ -10,7 +12,7 @@ describe API::MergeRequests do
let(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace, only_allow_merge_if_pipeline_succeeds: false) }
let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
let(:milestone1) { create(:milestone, title: '0.9', project: project) }
- let!(:merge_request) { create(:merge_request, :simple, milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, title: "Test", created_at: base_time) }
+ let!(:merge_request) { create(:merge_request, :simple, milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, source_branch: 'markdown', title: "Test", created_at: base_time) }
let!(:merge_request_closed) { create(:merge_request, state: "closed", milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, title: "Closed test", created_at: base_time + 1.second) }
let!(:merge_request_merged) { create(:merge_request, state: "merged", author: user, assignees: [user], source_project: project, target_project: project, title: "Merged test", created_at: base_time + 2.seconds, merge_commit_sha: '9999999999999999999999999999999999999999') }
let!(:merge_request_locked) { create(:merge_request, state: "locked", milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, title: "Locked test", created_at: base_time + 1.second) }
@@ -699,16 +701,20 @@ describe API::MergeRequests do
expect(json_response.first['id']).to eq merge_request_closed.id
end
- it 'avoids N+1 queries' do
- control = ActiveRecord::QueryRecorder.new do
- get api("/projects/#{project.id}/merge_requests", user)
- end.count
+ context 'a project which enforces all discussions to be resolved' do
+ let!(:project) { create(:project, :repository, only_allow_merge_if_all_discussions_are_resolved: true) }
- create(:merge_request, author: user, assignees: [user], source_project: project, target_project: project, created_at: base_time)
+ it 'avoids N+1 queries' do
+ control = ActiveRecord::QueryRecorder.new do
+ get api("/projects/#{project.id}/merge_requests", user)
+ end.count
- expect do
- get api("/projects/#{project.id}/merge_requests", user)
- end.not_to exceed_query_limit(control)
+ create(:merge_request, author: user, assignees: [user], source_project: project, target_project: project, created_at: base_time)
+
+ expect do
+ get api("/projects/#{project.id}/merge_requests", user)
+ end.not_to exceed_query_limit(control)
+ end
end
end
@@ -775,6 +781,8 @@ describe API::MergeRequests do
expect(json_response['merge_error']).to eq(merge_request.merge_error)
expect(json_response['user']['can_merge']).to be_truthy
expect(json_response).not_to include('rebase_in_progress')
+ expect(json_response['has_conflicts']).to be_falsy
+ expect(json_response['blocking_discussions_resolved']).to be_truthy
end
it 'exposes description and title html when render_html is true' do
@@ -921,7 +929,7 @@ describe API::MergeRequests do
allow_collaboration: true)
end
- it 'includes the `allow_collaboration` field' do
+ it 'includes the `allow_collaboration` field', :sidekiq_might_not_need_inline do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
expect(json_response['allow_collaboration']).to be_truthy
@@ -1035,14 +1043,12 @@ describe API::MergeRequests do
describe 'POST /projects/:id/merge_requests/:merge_request_iid/pipelines' do
before do
- allow_any_instance_of(Ci::Pipeline)
- .to receive(:ci_yaml_file)
- .and_return(YAML.dump({
- rspec: {
- script: 'ls',
- only: ['merge_requests']
- }
- }))
+ stub_ci_pipeline_yaml_file(YAML.dump({
+ rspec: {
+ script: 'ls',
+ only: ['merge_requests']
+ }
+ }))
end
let(:project) do
@@ -1326,7 +1332,7 @@ describe API::MergeRequests do
context 'accepts remove_source_branch parameter' do
let(:params) do
{ title: 'Test merge_request',
- source_branch: 'markdown',
+ source_branch: 'feature_conflict',
target_branch: 'master',
author: user }
end
@@ -1406,7 +1412,7 @@ describe API::MergeRequests do
expect(response).to have_gitlab_http_status(400)
end
- it 'allows setting `allow_collaboration`' do
+ it 'allows setting `allow_collaboration`', :sidekiq_might_not_need_inline do
post api("/projects/#{forked_project.id}/merge_requests", user2),
params: { title: 'Test merge_request', source_branch: "feature_conflict", target_branch: "master", author: user2, target_project_id: project.id, allow_collaboration: true }
expect(response).to have_gitlab_http_status(201)
@@ -1438,7 +1444,7 @@ describe API::MergeRequests do
end
end
- it "returns 201 when target_branch is specified and for the same project" do
+ it "returns 201 when target_branch is specified and for the same project", :sidekiq_might_not_need_inline do
post api("/projects/#{forked_project.id}/merge_requests", user2),
params: { title: 'Test merge_request', target_branch: 'master', source_branch: 'markdown', author: user2, target_project_id: forked_project.id }
expect(response).to have_gitlab_http_status(201)
@@ -1486,7 +1492,7 @@ describe API::MergeRequests do
end
describe "PUT /projects/:id/merge_requests/:merge_request_iid/merge" do
- let(:pipeline) { create(:ci_pipeline_without_jobs) }
+ let(:pipeline) { create(:ci_pipeline) }
it "returns merge_request in case of success" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
@@ -1633,6 +1639,21 @@ describe API::MergeRequests do
expect(source_repository.branch_exists?(source_branch)).to be_falsy
end
end
+
+ context "performing a ff-merge with squash" do
+ let(:merge_request) { create(:merge_request, :rebased, source_project: project, squash: true) }
+
+ before do
+ project.update(merge_requests_ff_only_enabled: true)
+ end
+
+ it "records the squash commit SHA and returns it in the response" do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['squash_commit_sha'].length).to eq(40)
+ end
+ end
end
describe "GET /projects/:id/merge_requests/:merge_request_iid/merge_ref", :clean_gitlab_redis_shared_state do
@@ -2152,6 +2173,16 @@ describe API::MergeRequests do
expect(response).to have_gitlab_http_status(409)
end
+
+ it "returns 409 if rebase can't lock the row" do
+ allow_any_instance_of(MergeRequest).to receive(:with_lock).and_raise(ActiveRecord::LockWaitTimeout)
+ expect(RebaseWorker).not_to receive(:perform_async)
+
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/rebase", user)
+
+ expect(response).to have_gitlab_http_status(409)
+ expect(json_response['message']).to eq(MergeRequest::REBASE_LOCK_MESSAGE)
+ end
end
describe 'Time tracking' do
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index 2e376109b42..e0bf1509be3 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Namespaces do
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index 6c1e30791d2..e57d7699892 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Notes do
diff --git a/spec/requests/api/notification_settings_spec.rb b/spec/requests/api/notification_settings_spec.rb
index 4ed667ad0dc..09fc0197c58 100644
--- a/spec/requests/api/notification_settings_spec.rb
+++ b/spec/requests/api/notification_settings_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::NotificationSettings do
diff --git a/spec/requests/api/oauth_tokens_spec.rb b/spec/requests/api/oauth_tokens_spec.rb
index 3811ec751de..8d7b3fa3c09 100644
--- a/spec/requests/api/oauth_tokens_spec.rb
+++ b/spec/requests/api/oauth_tokens_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'OAuth tokens' do
diff --git a/spec/requests/api/pages/internal_access_spec.rb b/spec/requests/api/pages/internal_access_spec.rb
index 28abe1a8456..821a210a414 100644
--- a/spec/requests/api/pages/internal_access_spec.rb
+++ b/spec/requests/api/pages/internal_access_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe "Internal Project Pages Access" do
diff --git a/spec/requests/api/pages/private_access_spec.rb b/spec/requests/api/pages/private_access_spec.rb
index 6af441caf74..ec84762b05a 100644
--- a/spec/requests/api/pages/private_access_spec.rb
+++ b/spec/requests/api/pages/private_access_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe "Private Project Pages Access" do
diff --git a/spec/requests/api/pages/public_access_spec.rb b/spec/requests/api/pages/public_access_spec.rb
index d99224eca5b..67b8cfb8fbc 100644
--- a/spec/requests/api/pages/public_access_spec.rb
+++ b/spec/requests/api/pages/public_access_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe "Public Project Pages Access" do
diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb
index 326b724666d..6b774e9335e 100644
--- a/spec/requests/api/pages_domains_spec.rb
+++ b/spec/requests/api/pages_domains_spec.rb
@@ -1,15 +1,22 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::PagesDomains do
- set(:project) { create(:project, path: 'my.project', pages_https_only: false) }
- set(:user) { create(:user) }
- set(:admin) { create(:admin) }
+ let_it_be(:project) { create(:project, path: 'my.project', pages_https_only: false) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
- set(:pages_domain) { create(:pages_domain, :without_key, :without_certificate, domain: 'www.domain.test', project: project) }
- set(:pages_domain_secure) { create(:pages_domain, domain: 'ssl.domain.test', project: project) }
- set(:pages_domain_expired) { create(:pages_domain, :with_expired_certificate, domain: 'expired.domain.test', project: project) }
+ let_it_be(:pages_domain) { create(:pages_domain, :without_key, :without_certificate, domain: 'www.domain.test', project: project) }
+ let_it_be(:pages_domain_secure) { create(:pages_domain, domain: 'ssl.domain.test', project: project) }
+ let_it_be(:pages_domain_with_letsencrypt) { create(:pages_domain, :letsencrypt, domain: 'letsencrypt.domain.test', project: project) }
+ let_it_be(:pages_domain_expired) { create(:pages_domain, :with_expired_certificate, domain: 'expired.domain.test', project: project) }
let(:pages_domain_params) { build(:pages_domain, :without_key, :without_certificate, domain: 'www.other-domain.test').slice(:domain) }
+ let(:pages_domain_with_letsencrypt_params) do
+ build(:pages_domain, :without_key, :without_certificate, domain: 'www.other-domain.test', auto_ssl_enabled: true)
+ .slice(:domain, :auto_ssl_enabled)
+ end
let(:pages_domain_secure_params) { build(:pages_domain, domain: 'ssl.other-domain.test', project: project).slice(:domain, :certificate, :key) }
let(:pages_domain_secure_key_missmatch_params) {build(:pages_domain, :with_trusted_chain, project: project).slice(:domain, :certificate, :key) }
let(:pages_domain_secure_missing_chain_params) {build(:pages_domain, :with_missing_chain, project: project).slice(:certificate) }
@@ -20,6 +27,7 @@ describe API::PagesDomains do
let(:route_secure_domain) { "/projects/#{project.id}/pages/domains/#{pages_domain_secure.domain}" }
let(:route_expired_domain) { "/projects/#{project.id}/pages/domains/#{pages_domain_expired.domain}" }
let(:route_vacant_domain) { "/projects/#{project.id}/pages/domains/www.vacant-domain.test" }
+ let(:route_letsencrypt_domain) { "/projects/#{project.id}/pages/domains/#{pages_domain_with_letsencrypt.domain}" }
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
@@ -45,9 +53,10 @@ describe API::PagesDomains do
expect(response).to match_response_schema('public_api/v4/pages_domain_basics')
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.size).to eq(3)
+ expect(json_response.size).to eq(4)
expect(json_response.last).to have_key('domain')
expect(json_response.last).to have_key('project_id')
+ expect(json_response.last).to have_key('auto_ssl_enabled')
expect(json_response.last).to have_key('certificate_expiration')
expect(json_response.last['certificate_expiration']['expired']).to be true
expect(json_response.first).not_to have_key('certificate_expiration')
@@ -71,7 +80,7 @@ describe API::PagesDomains do
expect(response).to match_response_schema('public_api/v4/pages_domains')
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.size).to eq(3)
+ expect(json_response.size).to eq(4)
expect(json_response.map { |pages_domain| pages_domain['domain'] }).to include(pages_domain.domain)
expect(json_response.last).to have_key('domain')
end
@@ -164,6 +173,7 @@ describe API::PagesDomains do
expect(json_response['url']).to eq(pages_domain_secure.url)
expect(json_response['certificate']['subject']).to eq(pages_domain_secure.subject)
expect(json_response['certificate']['expired']).to be false
+ expect(json_response['auto_ssl_enabled']).to be false
end
it 'returns pages domain with an expired certificate' do
@@ -173,6 +183,18 @@ describe API::PagesDomains do
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(json_response['certificate']['expired']).to be true
end
+
+ it 'returns pages domain with letsencrypt' do
+ get api(route_letsencrypt_domain, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
+ expect(json_response['domain']).to eq(pages_domain_with_letsencrypt.domain)
+ expect(json_response['url']).to eq(pages_domain_with_letsencrypt.url)
+ expect(json_response['certificate']['subject']).to eq(pages_domain_with_letsencrypt.subject)
+ expect(json_response['certificate']['expired']).to be false
+ expect(json_response['auto_ssl_enabled']).to be true
+ end
end
context 'when domain is vacant' do
@@ -244,6 +266,7 @@ describe API::PagesDomains do
expect(pages_domain.domain).to eq(params[:domain])
expect(pages_domain.certificate).to be_nil
expect(pages_domain.key).to be_nil
+ expect(pages_domain.auto_ssl_enabled).to be false
end
it 'creates a new secure pages domain' do
@@ -255,6 +278,29 @@ describe API::PagesDomains do
expect(pages_domain.domain).to eq(params_secure[:domain])
expect(pages_domain.certificate).to eq(params_secure[:certificate])
expect(pages_domain.key).to eq(params_secure[:key])
+ expect(pages_domain.auto_ssl_enabled).to be false
+ end
+
+ it 'creates domain with letsencrypt enabled' do
+ post api(route, user), params: pages_domain_with_letsencrypt_params
+ pages_domain = PagesDomain.find_by(domain: json_response['domain'])
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
+ expect(pages_domain.domain).to eq(pages_domain_with_letsencrypt_params[:domain])
+ expect(pages_domain.auto_ssl_enabled).to be true
+ end
+
+ it 'creates domain with letsencrypt enabled and provided certificate' do
+ post api(route, user), params: params_secure.merge(auto_ssl_enabled: true)
+ pages_domain = PagesDomain.find_by(domain: json_response['domain'])
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
+ expect(pages_domain.domain).to eq(params_secure[:domain])
+ expect(pages_domain.certificate).to eq(params_secure[:certificate])
+ expect(pages_domain.key).to eq(params_secure[:key])
+ expect(pages_domain.auto_ssl_enabled).to be true
end
it 'fails to create pages domain without key' do
@@ -321,13 +367,14 @@ describe API::PagesDomains do
shared_examples_for 'put pages domain' do
it 'updates pages domain removing certificate' do
- put api(route_secure_domain, user)
+ put api(route_secure_domain, user), params: { certificate: nil, key: nil }
pages_domain_secure.reload
expect(response).to have_gitlab_http_status(200)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain_secure.certificate).to be_nil
expect(pages_domain_secure.key).to be_nil
+ expect(pages_domain_secure.auto_ssl_enabled).to be false
end
it 'updates pages domain adding certificate' do
@@ -340,6 +387,37 @@ describe API::PagesDomains do
expect(pages_domain.key).to eq(params_secure[:key])
end
+ it 'updates pages domain adding certificate with letsencrypt' do
+ put api(route_domain, user), params: params_secure.merge(auto_ssl_enabled: true)
+ pages_domain.reload
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
+ expect(pages_domain.certificate).to eq(params_secure[:certificate])
+ expect(pages_domain.key).to eq(params_secure[:key])
+ expect(pages_domain.auto_ssl_enabled).to be true
+ end
+
+ it 'updates pages domain enabling letsencrypt' do
+ put api(route_domain, user), params: { auto_ssl_enabled: true }
+ pages_domain.reload
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
+ expect(pages_domain.auto_ssl_enabled).to be true
+ end
+
+ it 'updates pages domain disabling letsencrypt while preserving the certificate' do
+ put api(route_letsencrypt_domain, user), params: { auto_ssl_enabled: false }
+ pages_domain_with_letsencrypt.reload
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
+ expect(pages_domain_with_letsencrypt.auto_ssl_enabled).to be false
+ expect(pages_domain_with_letsencrypt.key).to be
+ expect(pages_domain_with_letsencrypt.certificate).to be
+ end
+
it 'updates pages domain with expired certificate' do
put api(route_expired_domain, user), params: params_secure
pages_domain_expired.reload
diff --git a/spec/requests/api/pipeline_schedules_spec.rb b/spec/requests/api/pipeline_schedules_spec.rb
index 072bd02f2ac..5c8ccce2e37 100644
--- a/spec/requests/api/pipeline_schedules_spec.rb
+++ b/spec/requests/api/pipeline_schedules_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::PipelineSchedules do
diff --git a/spec/requests/api/pipelines_spec.rb b/spec/requests/api/pipelines_spec.rb
index 3ac63dc381b..cce52cfc1ca 100644
--- a/spec/requests/api/pipelines_spec.rb
+++ b/spec/requests/api/pipelines_spec.rb
@@ -673,7 +673,7 @@ describe API::Pipelines do
let!(:build) { create(:ci_build, :running, pipeline: pipeline) }
context 'authorized user' do
- it 'retries failed builds' do
+ it 'retries failed builds', :sidekiq_might_not_need_inline do
post api("/projects/#{project.id}/pipelines/#{pipeline.id}/cancel", user)
expect(response).to have_gitlab_http_status(200)
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index a7b919de2ef..04e59238877 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -281,11 +281,14 @@ describe API::ProjectClusters do
let(:api_url) { 'https://kubernetes.example.com' }
let(:namespace) { 'new-namespace' }
let(:platform_kubernetes_attributes) { { namespace: namespace } }
+ let(:management_project) { create(:project, namespace: project.namespace) }
+ let(:management_project_id) { management_project.id }
let(:update_params) do
{
domain: 'new-domain.com',
- platform_kubernetes_attributes: platform_kubernetes_attributes
+ platform_kubernetes_attributes: platform_kubernetes_attributes,
+ management_project_id: management_project_id
}
end
@@ -310,6 +313,8 @@ describe API::ProjectClusters do
context 'authorized user' do
before do
+ management_project.add_maintainer(current_user)
+
put api("/projects/#{project.id}/clusters/#{cluster.id}", current_user), params: update_params
cluster.reload
@@ -323,6 +328,7 @@ describe API::ProjectClusters do
it 'updates cluster attributes' do
expect(cluster.domain).to eq('new-domain.com')
expect(cluster.platform_kubernetes.namespace).to eq('new-namespace')
+ expect(cluster.management_project).to eq(management_project)
end
end
@@ -336,6 +342,7 @@ describe API::ProjectClusters do
it 'does not update cluster attributes' do
expect(cluster.domain).not_to eq('new_domain.com')
expect(cluster.platform_kubernetes.namespace).not_to eq('invalid_namespace')
+ expect(cluster.management_project).not_to eq(management_project)
end
it 'returns validation errors' do
@@ -343,6 +350,18 @@ describe API::ProjectClusters do
end
end
+ context 'current user does not have access to management_project_id' do
+ let(:management_project_id) { create(:project).id }
+
+ it 'responds with 400' do
+ expect(response).to have_gitlab_http_status(400)
+ end
+
+ it 'returns validation errors' do
+ expect(json_response['message']['management_project_id'].first).to match('don\'t have permission')
+ end
+ end
+
context 'with a GCP cluster' do
context 'when user tries to change GCP specific fields' do
let(:platform_kubernetes_attributes) do
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index 3ac7ff7656b..d04db134db0 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProjectContainerRepositories do
@@ -44,6 +46,7 @@ describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :guest, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'list_repositories'
it_behaves_like 'returns repositories for allowed users', :reporter, 'project' do
let(:object) { project }
@@ -55,6 +58,7 @@ describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :developer, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'delete_repository'
context 'for maintainer' do
let(:api_user) { maintainer }
@@ -83,6 +87,8 @@ describe API::ProjectContainerRepositories do
stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA latest))
end
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'list_tags'
+
it 'returns a list of tags' do
subject
@@ -109,6 +115,7 @@ describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :developer, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'delete_tag_bulk'
end
context 'for maintainer' do
@@ -220,6 +227,7 @@ describe API::ProjectContainerRepositories do
it 'properly removes tag' do
expect(service).to receive(:execute).with(root_repository) { { status: :success } }
expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(root_repository.project, api_user, tags: %w[rootA]) { service }
+ expect(Gitlab::Tracking).to receive(:event).with(described_class.name, 'delete_tag', {})
subject
@@ -235,6 +243,7 @@ describe API::ProjectContainerRepositories do
it 'properly removes tag' do
expect(service).to receive(:execute).with(root_repository) { { status: :success } }
expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(root_repository.project, api_user, tags: %w[rootA]) { service }
+ expect(Gitlab::Tracking).to receive(:event).with(described_class.name, 'delete_tag', {})
subject
diff --git a/spec/requests/api/project_events_spec.rb b/spec/requests/api/project_events_spec.rb
index 8c2db6e4c62..d466dca9884 100644
--- a/spec/requests/api/project_events_spec.rb
+++ b/spec/requests/api/project_events_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProjectEvents do
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index 7de8935097a..605ff888234 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProjectExport do
@@ -370,7 +372,7 @@ describe API::ProjectExport do
end
context 'when overriding description' do
- it 'starts' do
+ it 'starts', :sidekiq_might_not_need_inline do
params = { description: "Foo" }
expect_any_instance_of(Projects::ImportExport::ExportService).to receive(:execute)
diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb
index b88a8b95201..06c09b100ac 100644
--- a/spec/requests/api/project_hooks_spec.rb
+++ b/spec/requests/api/project_hooks_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProjectHooks, 'ProjectHooks' do
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index d2b1fb063b8..866adbd424e 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProjectImport do
@@ -153,7 +155,7 @@ describe API::ProjectImport do
expect(import_project.import_data.data['override_params']).to be_empty
end
- it 'correctly overrides params during the import' do
+ it 'correctly overrides params during the import', :sidekiq_might_not_need_inline do
override_params = { 'description' => 'Hello world' }
perform_enqueued_jobs do
diff --git a/spec/requests/api/project_milestones_spec.rb b/spec/requests/api/project_milestones_spec.rb
index 895f05a98e8..df6d83c1e65 100644
--- a/spec/requests/api/project_milestones_spec.rb
+++ b/spec/requests/api/project_milestones_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProjectMilestones do
diff --git a/spec/requests/api/project_snapshots_spec.rb b/spec/requests/api/project_snapshots_spec.rb
index 2857715cdbe..cdd44f71649 100644
--- a/spec/requests/api/project_snapshots_spec.rb
+++ b/spec/requests/api/project_snapshots_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProjectSnapshots do
diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb
index ef0cabad4b0..cac3f07d0d0 100644
--- a/spec/requests/api/project_snippets_spec.rb
+++ b/spec/requests/api/project_snippets_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProjectSnippets do
diff --git a/spec/requests/api/project_templates_spec.rb b/spec/requests/api/project_templates_spec.rb
index 80e5033dab4..2bf864afe87 100644
--- a/spec/requests/api/project_templates_spec.rb
+++ b/spec/requests/api/project_templates_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProjectTemplates do
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 99d2a68ef53..f1447536e0f 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
shared_examples 'languages and percentages JSON response' do
@@ -15,7 +17,7 @@ shared_examples 'languages and percentages JSON response' do
end
context "when the languages haven't been detected yet" do
- it 'returns expected language values' do
+ it 'returns expected language values', :sidekiq_might_not_need_inline do
get api("/projects/#{project.id}/languages", user)
expect(response).to have_gitlab_http_status(:ok)
@@ -360,6 +362,30 @@ describe API::Projects do
end
end
+ context 'and using id_after' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { id_after: project2.id } }
+ let(:current_user) { user }
+ let(:projects) { [public_project, project, project2, project3].select { |p| p.id > project2.id } }
+ end
+ end
+
+ context 'and using id_before' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { id_before: project2.id } }
+ let(:current_user) { user }
+ let(:projects) { [public_project, project, project2, project3].select { |p| p.id < project2.id } }
+ end
+ end
+
+ context 'and using both id_after and id_before' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { id_before: project2.id, id_after: public_project.id } }
+ let(:current_user) { user }
+ let(:projects) { [public_project, project, project2, project3].select { |p| p.id < project2.id && p.id > public_project.id } }
+ end
+ end
+
context 'and membership=true' do
it_behaves_like 'projects response' do
let(:filter) { { membership: true } }
@@ -606,6 +632,7 @@ describe API::Projects do
merge_requests_enabled: false,
wiki_enabled: false,
resolve_outdated_diff_discussions: false,
+ remove_source_branch_after_merge: true,
only_allow_merge_if_pipeline_succeeds: false,
request_access_enabled: true,
only_allow_merge_if_all_discussions_are_resolved: false,
@@ -722,6 +749,22 @@ describe API::Projects do
expect(json_response['resolve_outdated_diff_discussions']).to be_truthy
end
+ it 'sets a project as not removing source branches' do
+ project = attributes_for(:project, remove_source_branch_after_merge: false)
+
+ post api('/projects', user), params: project
+
+ expect(json_response['remove_source_branch_after_merge']).to be_falsey
+ end
+
+ it 'sets a project as removing source branches' do
+ project = attributes_for(:project, remove_source_branch_after_merge: true)
+
+ post api('/projects', user), params: project
+
+ expect(json_response['remove_source_branch_after_merge']).to be_truthy
+ end
+
it 'sets a project as allowing merge even if build fails' do
project = attributes_for(:project, only_allow_merge_if_pipeline_succeeds: false)
@@ -829,6 +872,63 @@ describe API::Projects do
expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id)
end
+ context 'and using id_after' do
+ let!(:another_public_project) { create(:project, :public, name: 'another_public_project', creator_id: user4.id, namespace: user4.namespace) }
+
+ it 'only returns projects with id_after filter given' do
+ get api("/users/#{user4.id}/projects?id_after=#{public_project.id}", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.map { |project| project['id'] }).to contain_exactly(another_public_project.id)
+ end
+
+ it 'returns both projects without a id_after filter' do
+ get api("/users/#{user4.id}/projects", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id, another_public_project.id)
+ end
+ end
+
+ context 'and using id_before' do
+ let!(:another_public_project) { create(:project, :public, name: 'another_public_project', creator_id: user4.id, namespace: user4.namespace) }
+
+ it 'only returns projects with id_before filter given' do
+ get api("/users/#{user4.id}/projects?id_before=#{another_public_project.id}", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id)
+ end
+
+ it 'returns both projects without a id_before filter' do
+ get api("/users/#{user4.id}/projects", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id, another_public_project.id)
+ end
+ end
+
+ context 'and using both id_before and id_after' do
+ let!(:more_projects) { create_list(:project, 5, :public, creator_id: user4.id, namespace: user4.namespace) }
+
+ it 'only returns projects with id matching the range' do
+ get api("/users/#{user4.id}/projects?id_after=#{more_projects.first.id}&id_before=#{more_projects.last.id}", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.map { |project| project['id'] }).to contain_exactly(*more_projects[1..-2].map(&:id))
+ end
+ end
+
it 'returns projects filtered by username' do
get api("/users/#{user4.username}/projects/", user)
@@ -980,6 +1080,22 @@ describe API::Projects do
expect(json_response['resolve_outdated_diff_discussions']).to be_truthy
end
+ it 'sets a project as not removing source branches' do
+ project = attributes_for(:project, remove_source_branch_after_merge: false)
+
+ post api("/projects/user/#{user.id}", admin), params: project
+
+ expect(json_response['remove_source_branch_after_merge']).to be_falsey
+ end
+
+ it 'sets a project as removing source branches' do
+ project = attributes_for(:project, remove_source_branch_after_merge: true)
+
+ post api("/projects/user/#{user.id}", admin), params: project
+
+ expect(json_response['remove_source_branch_after_merge']).to be_truthy
+ end
+
it 'sets a project as allowing merge even if build fails' do
project = attributes_for(:project, only_allow_merge_if_pipeline_succeeds: false)
post api("/projects/user/#{user.id}", admin), params: project
@@ -1157,6 +1273,7 @@ describe API::Projects do
expect(json_response['wiki_access_level']).to be_present
expect(json_response['builds_access_level']).to be_present
expect(json_response['resolve_outdated_diff_discussions']).to eq(project.resolve_outdated_diff_discussions)
+ expect(json_response['remove_source_branch_after_merge']).to be_truthy
expect(json_response['container_registry_enabled']).to be_present
expect(json_response['created_at']).to be_present
expect(json_response['last_activity_at']).to be_present
diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb
index f90558d77a9..67ce704b3f3 100644
--- a/spec/requests/api/protected_branches_spec.rb
+++ b/spec/requests/api/protected_branches_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProtectedBranches do
diff --git a/spec/requests/api/protected_tags_spec.rb b/spec/requests/api/protected_tags_spec.rb
index 41363dcc1c3..5a962cd5667 100644
--- a/spec/requests/api/protected_tags_spec.rb
+++ b/spec/requests/api/protected_tags_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::ProtectedTags do
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 99d0ceee76b..bf05587fe03 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Releases do
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index 6f4bb525c89..ba301147d43 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'mime/types'
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 70a95663aea..6138036b0af 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Runner, :clean_gitlab_redis_shared_state do
@@ -312,7 +314,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
let(:root_namespace) { create(:namespace) }
let(:namespace) { create(:namespace, parent: root_namespace) }
let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
- let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
let(:runner) { create(:ci_runner, :project, projects: [project]) }
let(:job) do
create(:ci_build, :artifacts, :extended_options,
@@ -610,7 +612,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
context 'when job is made for merge request' do
- let(:pipeline) { create(:ci_pipeline_without_jobs, source: :merge_request_event, project: project, ref: 'feature', merge_request: merge_request) }
+ let(:pipeline) { create(:ci_pipeline, source: :merge_request_event, project: project, ref: 'feature', merge_request: merge_request) }
let!(:job) { create(:ci_build, pipeline: pipeline, name: 'spinach', ref: 'feature', stage: 'test', stage_idx: 0) }
let(:merge_request) { create(:merge_request) }
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index d26fbee6957..8daba204d50 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Runners do
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index 8abdcaa2e0e..24d7f1e313c 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Search do
@@ -436,6 +438,7 @@ describe API::Search do
expect(response).to have_gitlab_http_status(200)
expect(json_response.size).to eq(2)
+ expect(json_response.first['path']).to eq('PROCESS.md')
expect(json_response.first['filename']).to eq('PROCESS.md')
end
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index 7153fcc99d7..a080b59173f 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require "spec_helper"
describe API::Services do
@@ -100,7 +102,7 @@ describe API::Services do
expect(json_response['properties'].keys).to match_array(service_instance.api_field_names)
end
- it "returns empty hash if properties and data fields are empty" do
+ it "returns empty hash or nil values if properties and data fields are empty" do
# deprecated services are not valid for update
initialized_service.update_attribute(:properties, {})
@@ -112,7 +114,7 @@ describe API::Services do
get api("/projects/#{project.id}/services/#{dashed_service}", user)
expect(response).to have_gitlab_http_status(200)
- expect(json_response['properties'].keys).to be_empty
+ expect(json_response['properties'].values.compact).to be_empty
end
it "returns error when authenticated but not a project owner" do
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index f3bfb258029..b7586307929 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Settings, 'Settings' do
@@ -16,6 +18,10 @@ describe API::Settings, 'Settings' do
expect(json_response['password_authentication_enabled']).to be_truthy
expect(json_response['plantuml_enabled']).to be_falsey
expect(json_response['plantuml_url']).to be_nil
+ expect(json_response['default_ci_config_path']).to be_nil
+ expect(json_response['sourcegraph_enabled']).to be_falsey
+ expect(json_response['sourcegraph_url']).to be_nil
+ expect(json_response['sourcegraph_public_only']).to be_truthy
expect(json_response['default_project_visibility']).to be_a String
expect(json_response['default_snippet_visibility']).to be_a String
expect(json_response['default_group_visibility']).to be_a String
@@ -42,17 +48,22 @@ describe API::Settings, 'Settings' do
storages = Gitlab.config.repositories.storages
.merge({ 'custom' => 'tmp/tests/custom_repositories' })
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
+ Feature.get(:sourcegraph).enable
end
it "updates application settings" do
put api("/application/settings", admin),
params: {
+ default_ci_config_path: 'debian/salsa-ci.yml',
default_projects_limit: 3,
default_project_creation: 2,
password_authentication_enabled_for_web: false,
repository_storages: ['custom'],
plantuml_enabled: true,
plantuml_url: 'http://plantuml.example.com',
+ sourcegraph_enabled: true,
+ sourcegraph_url: 'https://sourcegraph.com',
+ sourcegraph_public_only: false,
default_snippet_visibility: 'internal',
restricted_visibility_levels: ['public'],
default_artifacts_expire_in: '2 days',
@@ -78,12 +89,16 @@ describe API::Settings, 'Settings' do
}
expect(response).to have_gitlab_http_status(200)
+ expect(json_response['default_ci_config_path']).to eq('debian/salsa-ci.yml')
expect(json_response['default_projects_limit']).to eq(3)
expect(json_response['default_project_creation']).to eq(::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS)
expect(json_response['password_authentication_enabled_for_web']).to be_falsey
expect(json_response['repository_storages']).to eq(['custom'])
expect(json_response['plantuml_enabled']).to be_truthy
expect(json_response['plantuml_url']).to eq('http://plantuml.example.com')
+ expect(json_response['sourcegraph_enabled']).to be_truthy
+ expect(json_response['sourcegraph_url']).to eq('https://sourcegraph.com')
+ expect(json_response['sourcegraph_public_only']).to eq(false)
expect(json_response['default_snippet_visibility']).to eq('internal')
expect(json_response['restricted_visibility_levels']).to eq(['public'])
expect(json_response['default_artifacts_expire_in']).to eq('2 days')
@@ -176,7 +191,8 @@ describe API::Settings, 'Settings' do
snowplow_collector_hostname: "snowplow.example.com",
snowplow_cookie_domain: ".example.com",
snowplow_enabled: true,
- snowplow_site_id: "site_id"
+ snowplow_app_id: "app_id",
+ snowplow_iglu_registry_url: 'https://example.com'
}
end
@@ -220,6 +236,61 @@ describe API::Settings, 'Settings' do
end
end
+ context 'EKS integration settings' do
+ let(:attribute_names) { settings.keys.map(&:to_s) }
+ let(:sensitive_attributes) { %w(eks_secret_access_key) }
+ let(:exposed_attributes) { attribute_names - sensitive_attributes }
+
+ let(:settings) do
+ {
+ eks_integration_enabled: true,
+ eks_account_id: '123456789012',
+ eks_access_key_id: 'access-key-id-12',
+ eks_secret_access_key: 'secret-access-key'
+ }
+ end
+
+ it 'includes attributes in the API' do
+ get api("/application/settings", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ exposed_attributes.each do |attribute|
+ expect(json_response.keys).to include(attribute)
+ end
+ end
+
+ it 'does not include sensitive attributes in the API' do
+ get api("/application/settings", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ sensitive_attributes.each do |attribute|
+ expect(json_response.keys).not_to include(attribute)
+ end
+ end
+
+ it 'allows updating the settings' do
+ put api("/application/settings", admin), params: settings
+
+ expect(response).to have_gitlab_http_status(200)
+ settings.each do |attribute, value|
+ expect(ApplicationSetting.current.public_send(attribute)).to eq(value)
+ end
+ end
+
+ context 'EKS integration is enabled but params are blank' do
+ let(:settings) { Hash[eks_integration_enabled: true] }
+
+ it 'does not update the settings' do
+ put api("/application/settings", admin), params: settings
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['error']).to include('eks_account_id is missing')
+ expect(json_response['error']).to include('eks_access_key_id is missing')
+ expect(json_response['error']).to include('eks_secret_access_key is missing')
+ end
+ end
+ end
+
context "missing plantuml_url value when plantuml_enabled is true" do
it "returns a blank parameter error message" do
put api("/application/settings", admin), params: { plantuml_enabled: true }
@@ -294,5 +365,14 @@ describe API::Settings, 'Settings' do
expect(json_response['domain_blacklist']).to eq(['domain3.com', '*.domain4.com'])
end
end
+
+ context "missing sourcegraph_url value when sourcegraph_enabled is true" do
+ it "returns a blank parameter error message" do
+ put api("/application/settings", admin), params: { sourcegraph_enabled: true }
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['error']).to eq('sourcegraph_url is missing')
+ end
+ end
end
end
diff --git a/spec/requests/api/sidekiq_metrics_spec.rb b/spec/requests/api/sidekiq_metrics_spec.rb
index fff9adb7f57..438b1475c54 100644
--- a/spec/requests/api/sidekiq_metrics_spec.rb
+++ b/spec/requests/api/sidekiq_metrics_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::SidekiqMetrics do
@@ -23,6 +25,10 @@ describe API::SidekiqMetrics do
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_a Hash
+ expect(json_response['jobs']).to be_a Hash
+ expect(json_response['jobs'].keys)
+ .to contain_exactly(*%w[processed failed enqueued dead])
+ expect(json_response['jobs'].values).to all(be_an(Integer))
end
it 'defines the `compound_metrics` endpoint' do
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index e7eaaea2418..36d2a0d7ea7 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Snippets do
diff --git a/spec/requests/api/system_hooks_spec.rb b/spec/requests/api/system_hooks_spec.rb
index 0e2f3face71..79790b1e999 100644
--- a/spec/requests/api/system_hooks_spec.rb
+++ b/spec/requests/api/system_hooks_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::SystemHooks do
diff --git a/spec/requests/api/tags_spec.rb b/spec/requests/api/tags_spec.rb
index c4f4a2cb889..3c6ec631664 100644
--- a/spec/requests/api/tags_spec.rb
+++ b/spec/requests/api/tags_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Tags do
diff --git a/spec/requests/api/templates_spec.rb b/spec/requests/api/templates_spec.rb
index d1e16ab9ca9..b6ba417d892 100644
--- a/spec/requests/api/templates_spec.rb
+++ b/spec/requests/api/templates_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Templates do
diff --git a/spec/requests/api/todos_spec.rb b/spec/requests/api/todos_spec.rb
index 9f0d5ad5d12..4121a0f3f3a 100644
--- a/spec/requests/api/todos_spec.rb
+++ b/spec/requests/api/todos_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Todos do
diff --git a/spec/requests/api/triggers_spec.rb b/spec/requests/api/triggers_spec.rb
index 8ea3d16a41f..fd1104fa978 100644
--- a/spec/requests/api/triggers_spec.rb
+++ b/spec/requests/api/triggers_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Triggers do
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index ee4e783e9ac..1a1e80f1ce3 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Users do
@@ -633,32 +635,6 @@ describe API::Users do
end
end
- describe "GET /users/sign_up" do
- context 'when experimental signup_flow is active' do
- before do
- stub_experiment(signup_flow: true)
- end
-
- it "shows sign up page" do
- get "/users/sign_up"
- expect(response).to have_gitlab_http_status(200)
- expect(response).to render_template(:new)
- end
- end
-
- context 'when experimental signup_flow is not active' do
- before do
- stub_experiment(signup_flow: false)
- end
-
- it "redirects to sign in page" do
- get "/users/sign_up"
- expect(response).to have_gitlab_http_status(302)
- expect(response).to redirect_to(new_user_session_path(anchor: 'register-pane'))
- end
- end
- end
-
describe "PUT /users/:id" do
let!(:admin_user) { create(:admin) }
@@ -1277,7 +1253,7 @@ describe API::Users do
admin
end
- it "deletes user" do
+ it "deletes user", :sidekiq_might_not_need_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
expect(response).to have_gitlab_http_status(204)
@@ -1312,7 +1288,7 @@ describe API::Users do
end
context "hard delete disabled" do
- it "moves contributions to the ghost user" do
+ it "moves contributions to the ghost user", :sidekiq_might_not_need_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
expect(response).to have_gitlab_http_status(204)
@@ -1322,7 +1298,7 @@ describe API::Users do
end
context "hard delete enabled" do
- it "removes contributions" do
+ it "removes contributions", :sidekiq_might_not_need_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin) }
expect(response).to have_gitlab_http_status(204)
diff --git a/spec/requests/api/variables_spec.rb b/spec/requests/api/variables_spec.rb
index 69f105b71a8..dfecd43cbfa 100644
--- a/spec/requests/api/variables_spec.rb
+++ b/spec/requests/api/variables_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Variables do
diff --git a/spec/requests/api/version_spec.rb b/spec/requests/api/version_spec.rb
index e06f8bbc095..e2117ca45ee 100644
--- a/spec/requests/api/version_spec.rb
+++ b/spec/requests/api/version_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe API::Version do
diff --git a/spec/requests/api/wikis_spec.rb b/spec/requests/api/wikis_spec.rb
index 97de26650db..310caa92eb9 100644
--- a/spec/requests/api/wikis_spec.rb
+++ b/spec/requests/api/wikis_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
# For every API endpoint we test 3 states of wikis:
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index e58f1b7d9dc..1b17d492b0c 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'Git HTTP requests' do
@@ -87,7 +89,7 @@ describe 'Git HTTP requests' do
end
shared_examples_for 'pulls are allowed' do
- it do
+ it 'allows pulls' do
download(path, env) do |response|
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
@@ -96,7 +98,7 @@ describe 'Git HTTP requests' do
end
shared_examples_for 'pushes are allowed' do
- it do
+ it 'allows pushes', :sidekiq_might_not_need_inline do
upload(path, env) do |response|
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
@@ -450,16 +452,22 @@ describe 'Git HTTP requests' do
context "when authentication fails" do
context "when the user is IP banned" do
before do
- Gitlab.config.rack_attack.git_basic_auth['enabled'] = true
+ stub_rack_attack_setting(enabled: true, ip_whitelist: [])
end
- it "responds with status 401" do
+ it "responds with status 403" do
expect(Rack::Attack::Allow2Ban).to receive(:filter).and_return(true)
- allow_any_instance_of(ActionDispatch::Request).to receive(:ip).and_return('1.2.3.4')
+ expect(Gitlab::AuthLogger).to receive(:error).with({
+ message: 'Rack_Attack',
+ env: :blocklist,
+ remote_ip: '127.0.0.1',
+ request_method: 'GET',
+ path: "/#{path}/info/refs?service=git-upload-pack"
+ })
clone_get(path, env)
- expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -493,7 +501,7 @@ describe 'Git HTTP requests' do
context "when the user isn't blocked" do
before do
- Gitlab.config.rack_attack.git_basic_auth['enabled'] = true
+ stub_rack_attack_setting(enabled: true, bantime: 1.minute, findtime: 5.minutes, maxretry: 2, ip_whitelist: [])
end
it "resets the IP in Rack Attack on download" do
@@ -652,9 +660,11 @@ describe 'Git HTTP requests' do
response.status
end
+ include_context 'rack attack cache store'
+
it "repeated attempts followed by successful attempt" do
options = Gitlab.config.rack_attack.git_basic_auth
- maxretry = options[:maxretry] - 1
+ maxretry = options[:maxretry]
ip = '1.2.3.4'
allow_any_instance_of(ActionDispatch::Request).to receive(:ip).and_return(ip)
@@ -666,12 +676,6 @@ describe 'Git HTTP requests' do
expect(attempt_login(true)).to eq(200)
expect(Rack::Attack::Allow2Ban.banned?(ip)).to be_falsey
-
- maxretry.times.each do
- expect(attempt_login(false)).to eq(401)
- end
-
- Rack::Attack::Allow2Ban.reset(ip, options)
end
end
@@ -843,8 +847,8 @@ describe 'Git HTTP requests' do
get "/#{project.full_path}/blob/master/info/refs"
end
- it "returns not found" do
- expect(response).to have_gitlab_http_status(:not_found)
+ it "redirects" do
+ expect(response).to have_gitlab_http_status(302)
end
end
end
diff --git a/spec/requests/groups/milestones_controller_spec.rb b/spec/requests/groups/milestones_controller_spec.rb
index af19d931284..977cccad29f 100644
--- a/spec/requests/groups/milestones_controller_spec.rb
+++ b/spec/requests/groups/milestones_controller_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Groups::MilestonesController do
diff --git a/spec/requests/groups/registry/repositories_controller_spec.rb b/spec/requests/groups/registry/repositories_controller_spec.rb
new file mode 100644
index 00000000000..35fdeaab604
--- /dev/null
+++ b/spec/requests/groups/registry/repositories_controller_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Groups::Registry::RepositoriesController do
+ let_it_be(:group, reload: true) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ stub_container_registry_config(enabled: true)
+
+ group.add_reporter(user)
+ login_as(user)
+ end
+
+ describe 'GET groups/:group_id/-/container_registries.json' do
+ it 'avoids N+1 queries' do
+ project = create(:project, group: group)
+ create(:container_repository, project: project)
+ endpoint = group_container_registries_path(group, format: :json)
+
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { get(endpoint) }.count
+
+ create_list(:project, 2, group: group).each do |project|
+ create_list(:container_repository, 2, project: project)
+ end
+
+ expect { get(endpoint) }.not_to exceed_all_query_limit(control_count)
+
+ # sanity check that response is 200
+ expect(response).to have_http_status(200)
+ repositories = json_response
+ expect(repositories.count).to eq(5)
+ end
+ end
+end
diff --git a/spec/requests/health_controller_spec.rb b/spec/requests/health_controller_spec.rb
new file mode 100644
index 00000000000..61412815039
--- /dev/null
+++ b/spec/requests/health_controller_spec.rb
@@ -0,0 +1,227 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe HealthController do
+ include StubENV
+
+ let(:token) { Gitlab::CurrentSettings.health_check_access_token }
+ let(:whitelisted_ip) { '1.1.1.1' }
+ let(:not_whitelisted_ip) { '2.2.2.2' }
+ let(:params) { {} }
+ let(:headers) { {} }
+
+ before do
+ allow(Settings.monitoring).to receive(:ip_whitelist).and_return([whitelisted_ip])
+ stub_storage_settings({}) # Hide the broken storage
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ end
+
+ shared_context 'endpoint querying database' do
+ it 'does query database' do
+ control_count = ActiveRecord::QueryRecorder.new { subject }.count
+
+ expect(control_count).not_to be_zero
+ end
+ end
+
+ shared_context 'endpoint not querying database' do
+ it 'does not query database' do
+ control_count = ActiveRecord::QueryRecorder.new { subject }.count
+
+ expect(control_count).to be_zero
+ end
+ end
+
+ shared_context 'endpoint not found' do
+ it 'responds with resource not found' do
+ subject
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ describe 'GET /-/health' do
+ subject { get '/-/health', params: params, headers: headers }
+
+ shared_context 'endpoint responding with health data' do
+ it 'responds with health checks data' do
+ subject
+
+ expect(response.status).to eq(200)
+ expect(response.body).to eq('GitLab OK')
+ end
+ end
+
+ context 'accessed from whitelisted ip' do
+ before do
+ stub_remote_addr(whitelisted_ip)
+ end
+
+ it_behaves_like 'endpoint responding with health data'
+ it_behaves_like 'endpoint not querying database'
+ end
+
+ context 'accessed from not whitelisted ip' do
+ before do
+ stub_remote_addr(not_whitelisted_ip)
+ end
+
+ it_behaves_like 'endpoint not querying database'
+ it_behaves_like 'endpoint not found'
+ end
+ end
+
+ describe 'GET /-/readiness' do
+ subject { get '/-/readiness', params: params, headers: headers }
+
+ shared_context 'endpoint responding with readiness data' do
+ context 'when requesting instance-checks' do
+ it 'responds with readiness checks data' do
+ expect(Gitlab::HealthChecks::MasterCheck).to receive(:check) { true }
+
+ subject
+
+ expect(json_response).to include({ 'status' => 'ok' })
+ expect(json_response['master_check']).to contain_exactly({ 'status' => 'ok' })
+ end
+
+ it 'responds with readiness checks data when a failure happens' do
+ expect(Gitlab::HealthChecks::MasterCheck).to receive(:check) { false }
+
+ subject
+
+ expect(json_response).to include({ 'status' => 'failed' })
+ expect(json_response['master_check']).to contain_exactly(
+ { 'status' => 'failed', 'message' => 'unexpected Master check result: false' })
+
+ expect(response.status).to eq(503)
+ expect(response.headers['X-GitLab-Custom-Error']).to eq(1)
+ end
+ end
+
+ context 'when requesting all checks' do
+ before do
+ params.merge!(all: true)
+ end
+
+ it 'responds with readiness checks data' do
+ subject
+
+ expect(json_response['db_check']).to contain_exactly({ 'status' => 'ok' })
+ expect(json_response['cache_check']).to contain_exactly({ 'status' => 'ok' })
+ expect(json_response['queues_check']).to contain_exactly({ 'status' => 'ok' })
+ expect(json_response['shared_state_check']).to contain_exactly({ 'status' => 'ok' })
+ expect(json_response['gitaly_check']).to contain_exactly(
+ { 'status' => 'ok', 'labels' => { 'shard' => 'default' } })
+ end
+
+ it 'responds with readiness checks data when a failure happens' do
+ allow(Gitlab::HealthChecks::Redis::RedisCheck).to receive(:readiness).and_return(
+ Gitlab::HealthChecks::Result.new('redis_check', false, "check error"))
+
+ subject
+
+ expect(json_response['cache_check']).to contain_exactly({ 'status' => 'ok' })
+ expect(json_response['redis_check']).to contain_exactly(
+ { 'status' => 'failed', 'message' => 'check error' })
+
+ expect(response.status).to eq(503)
+ expect(response.headers['X-GitLab-Custom-Error']).to eq(1)
+ end
+ end
+ end
+
+ context 'accessed from whitelisted ip' do
+ before do
+ stub_remote_addr(whitelisted_ip)
+ end
+
+ it_behaves_like 'endpoint not querying database'
+ it_behaves_like 'endpoint responding with readiness data'
+
+ context 'when requesting all checks' do
+ before do
+ params.merge!(all: true)
+ end
+
+ it_behaves_like 'endpoint querying database'
+ end
+ end
+
+ context 'accessed from not whitelisted ip' do
+ before do
+ stub_remote_addr(not_whitelisted_ip)
+ end
+
+ it_behaves_like 'endpoint not querying database'
+ it_behaves_like 'endpoint not found'
+ end
+
+ context 'accessed with valid token' do
+ context 'token passed in request header' do
+ let(:headers) { { TOKEN: token } }
+
+ it_behaves_like 'endpoint responding with readiness data'
+ it_behaves_like 'endpoint querying database'
+ end
+
+ context 'token passed as URL param' do
+ let(:params) { { token: token } }
+
+ it_behaves_like 'endpoint responding with readiness data'
+ it_behaves_like 'endpoint querying database'
+ end
+ end
+ end
+
+ describe 'GET /-/liveness' do
+ subject { get '/-/liveness', params: params, headers: headers }
+
+ shared_context 'endpoint responding with liveness data' do
+ it 'responds with liveness checks data' do
+ subject
+
+ expect(json_response).to eq('status' => 'ok')
+ end
+ end
+
+ context 'accessed from whitelisted ip' do
+ before do
+ stub_remote_addr(whitelisted_ip)
+ end
+
+ it_behaves_like 'endpoint not querying database'
+ it_behaves_like 'endpoint responding with liveness data'
+ end
+
+ context 'accessed from not whitelisted ip' do
+ before do
+ stub_remote_addr(not_whitelisted_ip)
+ end
+
+ it_behaves_like 'endpoint not querying database'
+ it_behaves_like 'endpoint not found'
+
+ context 'accessed with valid token' do
+ context 'token passed in request header' do
+ let(:headers) { { TOKEN: token } }
+
+ it_behaves_like 'endpoint responding with liveness data'
+ it_behaves_like 'endpoint querying database'
+ end
+
+ context 'token passed as URL param' do
+ let(:params) { { token: token } }
+
+ it_behaves_like 'endpoint responding with liveness data'
+ it_behaves_like 'endpoint querying database'
+ end
+ end
+ end
+ end
+
+ def stub_remote_addr(ip)
+ headers.merge!(REMOTE_ADDR: ip)
+ end
+end
diff --git a/spec/requests/jwt_controller_spec.rb b/spec/requests/jwt_controller_spec.rb
index 8b2c698fee1..c1f99115612 100644
--- a/spec/requests/jwt_controller_spec.rb
+++ b/spec/requests/jwt_controller_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe JwtController do
diff --git a/spec/requests/lfs_locks_api_spec.rb b/spec/requests/lfs_locks_api_spec.rb
index 11436e5cd0c..41f54162266 100644
--- a/spec/requests/lfs_locks_api_spec.rb
+++ b/spec/requests/lfs_locks_api_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'Git LFS File Locking API' do
diff --git a/spec/requests/oauth_tokens_spec.rb b/spec/requests/oauth_tokens_spec.rb
index 3873e754060..bb1c25d686e 100644
--- a/spec/requests/oauth_tokens_spec.rb
+++ b/spec/requests/oauth_tokens_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'OAuth Tokens requests' do
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index dfa17c5ff27..bac1a4e18c8 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'OpenID Connect requests' do
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index 25390f8a23e..93a1aafde23 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'cycle analytics events' do
@@ -48,7 +50,7 @@ describe 'cycle analytics events' do
expect(json_response['events'].first['iid']).to eq(first_mr_iid)
end
- it 'lists the test events' do
+ it 'lists the test events', :sidekiq_might_not_need_inline do
get project_cycle_analytics_test_path(project, format: :json)
expect(json_response['events']).not_to be_empty
@@ -64,14 +66,14 @@ describe 'cycle analytics events' do
expect(json_response['events'].first['iid']).to eq(first_mr_iid)
end
- it 'lists the staging events' do
+ it 'lists the staging events', :sidekiq_might_not_need_inline do
get project_cycle_analytics_staging_path(project, format: :json)
expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['date']).not_to be_empty
end
- it 'lists the production events' do
+ it 'lists the production events', :sidekiq_might_not_need_inline do
get project_cycle_analytics_production_path(project, format: :json)
first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
@@ -81,7 +83,7 @@ describe 'cycle analytics events' do
end
context 'specific branch' do
- it 'lists the test events' do
+ it 'lists the test events', :sidekiq_might_not_need_inline do
branch = project.merge_requests.first.source_branch
get project_cycle_analytics_test_path(project, format: :json, branch: branch)
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index ca8720cd414..4d5055a7e27 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'Rack Attack global throttles' do
@@ -20,6 +22,7 @@ describe 'Rack Attack global throttles' do
}
end
+ let(:request_method) { 'GET' }
let(:requests_per_period) { 1 }
let(:period_in_seconds) { 10000 }
let(:period) { period_in_seconds.seconds }
@@ -81,7 +84,7 @@ describe 'Rack Attack global throttles' do
expect(response).to have_http_status 200
end
- expect_any_instance_of(Rack::Attack::Request).to receive(:ip).and_return('1.2.3.4')
+ expect_any_instance_of(Rack::Attack::Request).to receive(:ip).at_least(:once).and_return('1.2.3.4')
# would be over limit for the same IP
get url_that_does_not_require_authentication
@@ -141,15 +144,15 @@ describe 'Rack Attack global throttles' do
let(:api_partial_url) { '/todos' }
context 'with the token in the query string' do
- let(:get_args) { [api(api_partial_url, personal_access_token: token)] }
- let(:other_user_get_args) { [api(api_partial_url, personal_access_token: other_user_token)] }
+ let(:request_args) { [api(api_partial_url, personal_access_token: token)] }
+ let(:other_user_request_args) { [api(api_partial_url, personal_access_token: other_user_token)] }
it_behaves_like 'rate-limited token-authenticated requests'
end
context 'with the token in the headers' do
- let(:get_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(token)) }
- let(:other_user_get_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(other_user_token)) }
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(other_user_token)) }
it_behaves_like 'rate-limited token-authenticated requests'
end
@@ -168,15 +171,15 @@ describe 'Rack Attack global throttles' do
let(:api_partial_url) { '/todos' }
context 'with the token in the query string' do
- let(:get_args) { [api(api_partial_url, oauth_access_token: token)] }
- let(:other_user_get_args) { [api(api_partial_url, oauth_access_token: other_user_token)] }
+ let(:request_args) { [api(api_partial_url, oauth_access_token: token)] }
+ let(:other_user_request_args) { [api(api_partial_url, oauth_access_token: other_user_token)] }
it_behaves_like 'rate-limited token-authenticated requests'
end
context 'with the token in the headers' do
- let(:get_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(token)) }
- let(:other_user_get_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(other_user_token)) }
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(other_user_token)) }
it_behaves_like 'rate-limited token-authenticated requests'
end
@@ -188,8 +191,8 @@ describe 'Rack Attack global throttles' do
let(:throttle_setting_prefix) { 'throttle_authenticated_web' }
context 'with the token in the query string' do
- let(:get_args) { [rss_url(user), params: nil] }
- let(:other_user_get_args) { [rss_url(other_user), params: nil] }
+ let(:request_args) { [rss_url(user), params: nil] }
+ let(:other_user_request_args) { [rss_url(other_user), params: nil] }
it_behaves_like 'rate-limited token-authenticated requests'
end
@@ -204,10 +207,13 @@ describe 'Rack Attack global throttles' do
end
describe 'protected paths' do
+ let(:request_method) { 'POST' }
+
context 'unauthenticated requests' do
let(:protected_path_that_does_not_require_authentication) do
- '/users/confirmation'
+ '/users/sign_in'
end
+ let(:post_params) { { user: { login: 'username', password: 'password' } } }
before do
settings_to_set[:throttle_protected_paths_requests_per_period] = requests_per_period # 1
@@ -222,7 +228,7 @@ describe 'Rack Attack global throttles' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
- get protected_path_that_does_not_require_authentication
+ post protected_path_that_does_not_require_authentication, params: post_params
expect(response).to have_http_status 200
end
end
@@ -236,11 +242,11 @@ describe 'Rack Attack global throttles' do
it 'rejects requests over the rate limit' do
requests_per_period.times do
- get protected_path_that_does_not_require_authentication
+ post protected_path_that_does_not_require_authentication, params: post_params
expect(response).to have_http_status 200
end
- expect_rejection { get protected_path_that_does_not_require_authentication }
+ expect_rejection { post protected_path_that_does_not_require_authentication, params: post_params }
end
context 'when Omnibus throttle is present' do
@@ -251,7 +257,7 @@ describe 'Rack Attack global throttles' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
- get protected_path_that_does_not_require_authentication
+ post protected_path_that_does_not_require_authentication, params: post_params
expect(response).to have_http_status 200
end
end
@@ -265,11 +271,11 @@ describe 'Rack Attack global throttles' do
let(:other_user) { create(:user) }
let(:other_user_token) { create(:personal_access_token, user: other_user) }
let(:throttle_setting_prefix) { 'throttle_protected_paths' }
- let(:api_partial_url) { '/users' }
+ let(:api_partial_url) { '/user/emails' }
let(:protected_paths) do
[
- '/api/v4/users'
+ '/api/v4/user/emails'
]
end
@@ -279,22 +285,22 @@ describe 'Rack Attack global throttles' do
end
context 'with the token in the query string' do
- let(:get_args) { [api(api_partial_url, personal_access_token: token)] }
- let(:other_user_get_args) { [api(api_partial_url, personal_access_token: other_user_token)] }
+ let(:request_args) { [api(api_partial_url, personal_access_token: token)] }
+ let(:other_user_request_args) { [api(api_partial_url, personal_access_token: other_user_token)] }
it_behaves_like 'rate-limited token-authenticated requests'
end
context 'with the token in the headers' do
- let(:get_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(token)) }
- let(:other_user_get_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(other_user_token)) }
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(other_user_token)) }
it_behaves_like 'rate-limited token-authenticated requests'
end
context 'when Omnibus throttle is present' do
- let(:get_args) { [api(api_partial_url, personal_access_token: token)] }
- let(:other_user_get_args) { [api(api_partial_url, personal_access_token: other_user_token)] }
+ let(:request_args) { [api(api_partial_url, personal_access_token: token)] }
+ let(:other_user_request_args) { [api(api_partial_url, personal_access_token: other_user_token)] }
before do
settings_to_set[:"#{throttle_setting_prefix}_requests_per_period"] = requests_per_period
@@ -308,8 +314,8 @@ describe 'Rack Attack global throttles' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
- get(*get_args)
- expect(response).to have_http_status 200
+ post(*request_args)
+ expect(response).not_to have_http_status 429
end
end
end
@@ -318,7 +324,7 @@ describe 'Rack Attack global throttles' do
describe 'web requests authenticated with regular login' do
let(:throttle_setting_prefix) { 'throttle_protected_paths' }
let(:user) { create(:user) }
- let(:url_that_requires_authentication) { '/dashboard/snippets' }
+ let(:url_that_requires_authentication) { '/users/confirmation' }
let(:protected_paths) do
[
@@ -348,8 +354,8 @@ describe 'Rack Attack global throttles' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
- get url_that_requires_authentication
- expect(response).to have_http_status 200
+ post url_that_requires_authentication
+ expect(response).not_to have_http_status 429
end
end
end
diff --git a/spec/requests/request_profiler_spec.rb b/spec/requests/request_profiler_spec.rb
index 851affbcf88..36ccfc6b400 100644
--- a/spec/requests/request_profiler_spec.rb
+++ b/spec/requests/request_profiler_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'Request Profiler' do
diff --git a/spec/routing/admin_routing_spec.rb b/spec/routing/admin_routing_spec.rb
index 77baaef7afd..a82bdfe3ce8 100644
--- a/spec/routing/admin_routing_spec.rb
+++ b/spec/routing/admin_routing_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
# block_admin_user PUT /admin/users/:id/block(.:format) admin/users#block
diff --git a/spec/routing/environments_spec.rb b/spec/routing/environments_spec.rb
index 28b3e79c1ff..ea172698764 100644
--- a/spec/routing/environments_spec.rb
+++ b/spec/routing/environments_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'environments routing' do
diff --git a/spec/routing/group_routing_spec.rb b/spec/routing/group_routing_spec.rb
index c6b101ae908..2a8454a276d 100644
--- a/spec/routing/group_routing_spec.rb
+++ b/spec/routing/group_routing_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe "Groups", "routing" do
diff --git a/spec/routing/import_routing_spec.rb b/spec/routing/import_routing_spec.rb
index 3fdede7914d..7e78a1c0cd2 100644
--- a/spec/routing/import_routing_spec.rb
+++ b/spec/routing/import_routing_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
# Shared examples for a resource inside a Project
diff --git a/spec/routing/notifications_routing_spec.rb b/spec/routing/notifications_routing_spec.rb
index 54ed87b5520..8c2b29aabcb 100644
--- a/spec/routing/notifications_routing_spec.rb
+++ b/spec/routing/notifications_routing_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require "spec_helper"
describe "notifications routing" do
diff --git a/spec/routing/openid_connect_spec.rb b/spec/routing/openid_connect_spec.rb
index 2c3bc08f1a1..70470032930 100644
--- a/spec/routing/openid_connect_spec.rb
+++ b/spec/routing/openid_connect_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
# oauth_discovery_keys GET /oauth/discovery/keys(.:format) doorkeeper/openid_connect/discovery#keys
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index acdbf064a73..561c2b572ec 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe 'project routing' do
@@ -786,4 +788,10 @@ describe 'project routing' do
expect(put("/gitlab/gitlabhq/-/deploy_tokens/1/revoke")).to route_to("projects/deploy_tokens#revoke", namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
end
+
+ describe Projects::UsagePingController, 'routing' do
+ it 'routes to usage_ping#web_ide_clientside_preview' do
+ expect(post('/gitlab/gitlabhq/usage_ping/web_ide_clientside_preview')).to route_to('projects/usage_ping#web_ide_clientside_preview', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+ end
end
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 1b982fa7744..6f67cdb1222 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
# user GET /users/:username/
@@ -275,6 +277,33 @@ describe "Authentication", "routing" do
it "PUT /users/password" do
expect(put("/users/password")).to route_to('passwords#update')
end
+
+ context 'with LDAP configured' do
+ include LdapHelpers
+
+ let(:ldap_settings) { { enabled: true } }
+
+ before do
+ stub_ldap_setting(ldap_settings)
+ Rails.application.reload_routes!
+ end
+
+ after(:all) do
+ Rails.application.reload_routes!
+ end
+
+ it 'POST /users/auth/ldapmain/callback' do
+ expect(post("/users/auth/ldapmain/callback")).to route_to('ldap/omniauth_callbacks#ldapmain')
+ end
+
+ context 'with LDAP sign-in disabled' do
+ let(:ldap_settings) { { enabled: true, prevent_ldap_sign_in: true } }
+
+ it 'prevents POST /users/auth/ldapmain/callback' do
+ expect(post("/users/auth/ldapmain/callback")).not_to be_routable
+ end
+ end
+ end
end
describe HealthCheckController, 'routing' do
diff --git a/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb b/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
index ac7b1575ec0..62f6c7a3414 100644
--- a/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
+++ b/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
diff --git a/spec/rubocop/cop/avoid_return_from_blocks_spec.rb b/spec/rubocop/cop/avoid_return_from_blocks_spec.rb
index a5c280a7adc..133d286ccd2 100644
--- a/spec/rubocop/cop/avoid_return_from_blocks_spec.rb
+++ b/spec/rubocop/cop/avoid_return_from_blocks_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
diff --git a/spec/rubocop/cop/destroy_all_spec.rb b/spec/rubocop/cop/destroy_all_spec.rb
index b0bc40552b3..ac8aa56e040 100644
--- a/spec/rubocop/cop/destroy_all_spec.rb
+++ b/spec/rubocop/cop/destroy_all_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
diff --git a/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb b/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
index 7f689b196c5..7af98b66218 100644
--- a/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
+++ b/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/gitlab/httparty_spec.rb b/spec/rubocop/cop/gitlab/httparty_spec.rb
index 510839a21d7..42da97679ec 100644
--- a/spec/rubocop/cop/gitlab/httparty_spec.rb
+++ b/spec/rubocop/cop/gitlab/httparty_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
diff --git a/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb b/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb
index 8e2d5f70353..9cb55ced1fa 100644
--- a/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb
+++ b/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
diff --git a/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb b/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb
index 21fc4584654..ae9466368d2 100644
--- a/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb
+++ b/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
diff --git a/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb b/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb
index 7b5235a3da7..8e027ad59f7 100644
--- a/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb
+++ b/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
diff --git a/spec/rubocop/cop/include_sidekiq_worker_spec.rb b/spec/rubocop/cop/include_sidekiq_worker_spec.rb
index f5109287876..39965646aff 100644
--- a/spec/rubocop/cop/include_sidekiq_worker_spec.rb
+++ b/spec/rubocop/cop/include_sidekiq_worker_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/line_break_around_conditional_block_spec.rb b/spec/rubocop/cop/line_break_around_conditional_block_spec.rb
index cc933ce12c8..d09de4c6614 100644
--- a/spec/rubocop/cop/line_break_around_conditional_block_spec.rb
+++ b/spec/rubocop/cop/line_break_around_conditional_block_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
diff --git a/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb b/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb
index 1df1fffb94e..419d74c298a 100644
--- a/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb
+++ b/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/add_concurrent_index_spec.rb b/spec/rubocop/cop/migration/add_concurrent_index_spec.rb
index 9c1ebcc0ced..9812e64216f 100644
--- a/spec/rubocop/cop/migration/add_concurrent_index_spec.rb
+++ b/spec/rubocop/cop/migration/add_concurrent_index_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/add_reference_spec.rb b/spec/rubocop/cop/migration/add_reference_spec.rb
index 0b56fe8ed83..03348ecc744 100644
--- a/spec/rubocop/cop/migration/add_reference_spec.rb
+++ b/spec/rubocop/cop/migration/add_reference_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/add_timestamps_spec.rb b/spec/rubocop/cop/migration/add_timestamps_spec.rb
index 33f1bb85af8..a3314d878e5 100644
--- a/spec/rubocop/cop/migration/add_timestamps_spec.rb
+++ b/spec/rubocop/cop/migration/add_timestamps_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/datetime_spec.rb b/spec/rubocop/cop/migration/datetime_spec.rb
index f2d9483d8d3..0a771003100 100644
--- a/spec/rubocop/cop/migration/datetime_spec.rb
+++ b/spec/rubocop/cop/migration/datetime_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/hash_index_spec.rb b/spec/rubocop/cop/migration/hash_index_spec.rb
index 5d53dde9a79..e8b05a94653 100644
--- a/spec/rubocop/cop/migration/hash_index_spec.rb
+++ b/spec/rubocop/cop/migration/hash_index_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/remove_column_spec.rb b/spec/rubocop/cop/migration/remove_column_spec.rb
index f1a64f431bd..bc2fa04ce64 100644
--- a/spec/rubocop/cop/migration/remove_column_spec.rb
+++ b/spec/rubocop/cop/migration/remove_column_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb b/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb
index a23d5d022e3..9de4c756f12 100644
--- a/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb
+++ b/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/remove_index_spec.rb b/spec/rubocop/cop/migration/remove_index_spec.rb
index bbf2227e512..d343d27484a 100644
--- a/spec/rubocop/cop/migration/remove_index_spec.rb
+++ b/spec/rubocop/cop/migration/remove_index_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/reversible_add_column_with_default_spec.rb b/spec/rubocop/cop/migration/reversible_add_column_with_default_spec.rb
index ba8cd2c6c4a..b3c5b855004 100644
--- a/spec/rubocop/cop/migration/reversible_add_column_with_default_spec.rb
+++ b/spec/rubocop/cop/migration/reversible_add_column_with_default_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/safer_boolean_column_spec.rb b/spec/rubocop/cop/migration/safer_boolean_column_spec.rb
index 1c4f18fbcc3..915b73ed5a7 100644
--- a/spec/rubocop/cop/migration/safer_boolean_column_spec.rb
+++ b/spec/rubocop/cop/migration/safer_boolean_column_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/timestamps_spec.rb b/spec/rubocop/cop/migration/timestamps_spec.rb
index cafe255dc9a..d03c75e7cfc 100644
--- a/spec/rubocop/cop/migration/timestamps_spec.rb
+++ b/spec/rubocop/cop/migration/timestamps_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/update_column_in_batches_spec.rb b/spec/rubocop/cop/migration/update_column_in_batches_spec.rb
index cba01400d85..f72efaf2eb2 100644
--- a/spec/rubocop/cop/migration/update_column_in_batches_spec.rb
+++ b/spec/rubocop/cop/migration/update_column_in_batches_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/migration/update_large_table_spec.rb b/spec/rubocop/cop/migration/update_large_table_spec.rb
index 5e08eb4f772..0463b6550a8 100644
--- a/spec/rubocop/cop/migration/update_large_table_spec.rb
+++ b/spec/rubocop/cop/migration/update_large_table_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/project_path_helper_spec.rb b/spec/rubocop/cop/project_path_helper_spec.rb
index 84e6eb7d87f..1b69030c798 100644
--- a/spec/rubocop/cop/project_path_helper_spec.rb
+++ b/spec/rubocop/cop/project_path_helper_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/rspec/any_instance_of_spec.rb b/spec/rubocop/cop/rspec/any_instance_of_spec.rb
new file mode 100644
index 00000000000..b16f8ac189c
--- /dev/null
+++ b/spec/rubocop/cop/rspec/any_instance_of_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_relative '../../../../rubocop/cop/rspec/any_instance_of'
+
+describe RuboCop::Cop::RSpec::AnyInstanceOf do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ context 'when calling allow_any_instance_of' do
+ let(:source) do
+ <<~SRC
+ allow_any_instance_of(User).to receive(:invalidate_issue_cache_counts)
+ SRC
+ end
+ let(:corrected_source) do
+ <<~SRC
+ allow_next_instance_of(User) do |instance|
+ allow(instance).to receive(:invalidate_issue_cache_counts)
+ end
+ SRC
+ end
+
+ it 'registers an offence' do
+ inspect_source(source)
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'can autocorrect the source' do
+ expect(autocorrect_source(source)).to eq(corrected_source)
+ end
+ end
+
+ context 'when calling expect_any_instance_of' do
+ let(:source) do
+ <<~SRC
+ expect_any_instance_of(User).to receive(:invalidate_issue_cache_counts).with(args).and_return(double)
+ SRC
+ end
+ let(:corrected_source) do
+ <<~SRC
+ expect_next_instance_of(User) do |instance|
+ expect(instance).to receive(:invalidate_issue_cache_counts).with(args).and_return(double)
+ end
+ SRC
+ end
+
+ it 'registers an offence' do
+ inspect_source(source)
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'can autocorrect the source' do
+ expect(autocorrect_source(source)).to eq(corrected_source)
+ end
+ end
+end
diff --git a/spec/rubocop/cop/rspec/env_assignment_spec.rb b/spec/rubocop/cop/rspec/env_assignment_spec.rb
index 621afbad3ba..2a2bd1434d6 100644
--- a/spec/rubocop/cop/rspec/env_assignment_spec.rb
+++ b/spec/rubocop/cop/rspec/env_assignment_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb b/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb
index 94324bc615d..20013519db4 100644
--- a/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb
+++ b/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/rubocop/cop/sidekiq_options_queue_spec.rb b/spec/rubocop/cop/sidekiq_options_queue_spec.rb
index 7f237d5ffbb..c10fd7bd32b 100644
--- a/spec/rubocop/cop/sidekiq_options_queue_spec.rb
+++ b/spec/rubocop/cop/sidekiq_options_queue_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
require 'rubocop'
diff --git a/spec/serializers/blob_entity_spec.rb b/spec/serializers/blob_entity_spec.rb
index c0687d0232e..7e3a0a87bd5 100644
--- a/spec/serializers/blob_entity_spec.rb
+++ b/spec/serializers/blob_entity_spec.rb
@@ -15,8 +15,16 @@ describe BlobEntity do
context 'as json' do
subject { entity.as_json }
- it 'exposes needed attributes' do
- expect(subject).to include(:readable_text, :url)
+ it 'contains needed attributes' do
+ expect(subject).to include({
+ id: blob.id,
+ path: blob.path,
+ name: blob.name,
+ mode: "100644",
+ readable_text: true,
+ icon: "file-text-o",
+ url: "/#{project.full_path}/blob/master/bar/branch-test.txt"
+ })
end
end
end
diff --git a/spec/serializers/diff_file_base_entity_spec.rb b/spec/serializers/diff_file_base_entity_spec.rb
index 68c5c665ed6..80f5bc8f159 100644
--- a/spec/serializers/diff_file_base_entity_spec.rb
+++ b/spec/serializers/diff_file_base_entity_spec.rb
@@ -5,15 +5,15 @@ require 'spec_helper'
describe DiffFileBaseEntity do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
+ let(:entity) { described_class.new(diff_file, options).as_json }
context 'diff for a changed submodule' do
let(:commit_sha_with_changed_submodule) do
"cfe32cf61b73a0d5e9f13e774abde7ff789b1660"
end
let(:commit) { project.commit(commit_sha_with_changed_submodule) }
- let(:diff_file) { commit.diffs.diff_files.to_a.last }
let(:options) { { request: {}, submodule_links: Gitlab::SubmoduleLinks.new(repository) } }
- let(:entity) { described_class.new(diff_file, options).as_json }
+ let(:diff_file) { commit.diffs.diff_files.to_a.last }
it do
expect(entity[:submodule]).to eq(true)
@@ -23,4 +23,15 @@ describe DiffFileBaseEntity do
)
end
end
+
+ context 'contains raw sizes for the blob' do
+ let(:commit) { project.commit('png-lfs') }
+ let(:options) { { request: {} } }
+ let(:diff_file) { commit.diffs.diff_files.to_a.second }
+
+ it do
+ expect(entity[:old_size]).to eq(1219696)
+ expect(entity[:new_size]).to eq(132)
+ end
+ end
end
diff --git a/spec/serializers/diff_file_entity_spec.rb b/spec/serializers/diff_file_entity_spec.rb
index 0c2e7c1e3eb..65b62f8aa16 100644
--- a/spec/serializers/diff_file_entity_spec.rb
+++ b/spec/serializers/diff_file_entity_spec.rb
@@ -11,7 +11,8 @@ describe DiffFileEntity do
let(:diff_refs) { commit.diff_refs }
let(:diff) { commit.raw_diffs.first }
let(:diff_file) { Gitlab::Diff::File.new(diff, diff_refs: diff_refs, repository: repository) }
- let(:entity) { described_class.new(diff_file, request: {}) }
+ let(:options) { {} }
+ let(:entity) { described_class.new(diff_file, options.reverse_merge(request: {})) }
subject { entity.as_json }
@@ -23,7 +24,7 @@ describe DiffFileEntity do
let(:user) { create(:user) }
let(:request) { EntityRequest.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- let(:entity) { described_class.new(diff_file, request: request, merge_request: merge_request) }
+ let(:entity) { described_class.new(diff_file, options.merge(request: request, merge_request: merge_request)) }
let(:exposed_urls) { %i(edit_path view_path context_lines_path) }
it_behaves_like 'diff file entity'
@@ -49,6 +50,8 @@ describe DiffFileEntity do
end
context '#parallel_diff_lines' do
+ let(:options) { { diff_view: :parallel } }
+
it 'exposes parallel diff lines correctly' do
response = subject
diff --git a/spec/serializers/issuable_sidebar_extras_entity_spec.rb b/spec/serializers/issuable_sidebar_extras_entity_spec.rb
new file mode 100644
index 00000000000..a1a7c554b49
--- /dev/null
+++ b/spec/serializers/issuable_sidebar_extras_entity_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe IssuableSidebarExtrasEntity do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:resource) { create(:issue, project: project) }
+ let(:request) { double('request', current_user: user) }
+
+ subject { described_class.new(resource, request: request).as_json }
+
+ it 'have subscribe attributes' do
+ expect(subject).to include(:participants,
+ :project_emails_disabled,
+ :subscribe_disabled_description,
+ :subscribed,
+ :assignees)
+ end
+end
diff --git a/spec/serializers/job_artifact_report_entity_spec.rb b/spec/serializers/job_artifact_report_entity_spec.rb
index eef5c16d0fb..3cd12f0e9fe 100644
--- a/spec/serializers/job_artifact_report_entity_spec.rb
+++ b/spec/serializers/job_artifact_report_entity_spec.rb
@@ -22,7 +22,7 @@ describe JobArtifactReportEntity do
end
it 'exposes download path' do
- expect(subject[:download_path]).to include("jobs/#{report.job.id}/artifacts/download")
+ expect(subject[:download_path]).to include("jobs/#{report.job.id}/artifacts/download?file_type=#{report.file_type}")
end
end
end
diff --git a/spec/serializers/merge_request_diff_entity_spec.rb b/spec/serializers/merge_request_diff_entity_spec.rb
index 062f17963c0..59ec0b22158 100644
--- a/spec/serializers/merge_request_diff_entity_spec.rb
+++ b/spec/serializers/merge_request_diff_entity_spec.rb
@@ -7,14 +7,15 @@ describe MergeRequestDiffEntity do
let(:request) { EntityRequest.new(project: project) }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
let(:merge_request_diffs) { merge_request.merge_request_diffs }
+ let(:merge_request_diff) { merge_request_diffs.first }
let(:entity) do
- described_class.new(merge_request_diffs.first, request: request, merge_request: merge_request, merge_request_diffs: merge_request_diffs)
+ described_class.new(merge_request_diff, request: request, merge_request: merge_request, merge_request_diffs: merge_request_diffs)
end
- context 'as json' do
- subject { entity.as_json }
+ subject { entity.as_json }
+ context 'as json' do
it 'exposes needed attributes' do
expect(subject).to include(
:version_index, :created_at, :commits_count,
@@ -23,4 +24,16 @@ describe MergeRequestDiffEntity do
)
end
end
+
+ describe '#short_commit_sha' do
+ it 'returns short sha' do
+ expect(subject[:short_commit_sha]).to eq('b83d6e39')
+ end
+
+ it 'returns nil if head_commit_sha does not exist' do
+ allow(merge_request_diff).to receive(:head_commit_sha).and_return(nil)
+
+ expect(subject[:short_commit_sha]).to eq(nil)
+ end
+ end
end
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index 4872b23d26b..35940ac062e 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -358,4 +358,26 @@ describe MergeRequestWidgetEntity do
end
end
end
+
+ describe 'exposed_artifacts_path' do
+ context 'when merge request has exposed artifacts' do
+ before do
+ expect(resource).to receive(:has_exposed_artifacts?).and_return(true)
+ end
+
+ it 'set the path to poll data' do
+ expect(subject[:exposed_artifacts_path]).to be_present
+ end
+ end
+
+ context 'when merge request has no exposed artifacts' do
+ before do
+ expect(resource).to receive(:has_exposed_artifacts?).and_return(false)
+ end
+
+ it 'set the path to poll data' do
+ expect(subject[:exposed_artifacts_path]).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/serializers/pipeline_details_entity_spec.rb b/spec/serializers/pipeline_details_entity_spec.rb
index b180ede51eb..9ce7c265e43 100644
--- a/spec/serializers/pipeline_details_entity_spec.rb
+++ b/spec/serializers/pipeline_details_entity_spec.rb
@@ -115,7 +115,7 @@ describe PipelineDetailsEntity do
context 'when pipeline has YAML errors' do
let(:pipeline) do
- create(:ci_pipeline, config: { rspec: { invalid: :value } })
+ create(:ci_pipeline, yaml_errors: 'Some error occurred')
end
it 'contains information about error' do
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index ce5264ec8bb..7661c8acc13 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -180,7 +180,7 @@ describe PipelineSerializer do
# pipeline. With the same ref this check is cached but if refs are
# different then there is an extra query per ref
# https://gitlab.com/gitlab-org/gitlab-foss/issues/46368
- expected_queries = Gitlab.ee? ? 44 : 41
+ expected_queries = Gitlab.ee? ? 41 : 38
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
diff --git a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
index ccbb4e7c30d..f2cda999932 100644
--- a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
+++ b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
@@ -13,8 +13,7 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do
end
let(:pipeline) do
- create(:ci_pipeline_with_one_job, ref: mr_merge_if_green_enabled.source_branch,
- project: project)
+ create(:ci_pipeline, ref: mr_merge_if_green_enabled.source_branch, project: project)
end
let(:service) do
@@ -226,7 +225,7 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do
test.drop
end
- it 'merges when all stages succeeded' do
+ it 'merges when all stages succeeded', :sidekiq_might_not_need_inline do
expect(MergeWorker).to receive(:perform_async)
build.success
diff --git a/spec/services/ci/cancel_user_pipelines_service_spec.rb b/spec/services/ci/cancel_user_pipelines_service_spec.rb
index 251f21feaef..b18bf48a50a 100644
--- a/spec/services/ci/cancel_user_pipelines_service_spec.rb
+++ b/spec/services/ci/cancel_user_pipelines_service_spec.rb
@@ -12,7 +12,7 @@ describe Ci::CancelUserPipelinesService do
let(:pipeline) { create(:ci_pipeline, :running, user: user) }
let!(:build) { create(:ci_build, :running, pipeline: pipeline) }
- it 'cancels all running pipelines and related jobs' do
+ it 'cancels all running pipelines and related jobs', :sidekiq_might_not_need_inline do
subject
expect(pipeline.reload).to be_canceled
diff --git a/spec/services/ci/create_pipeline_service/cache_spec.rb b/spec/services/ci/create_pipeline_service/cache_spec.rb
new file mode 100644
index 00000000000..4e0567132ff
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/cache_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::CreatePipelineService do
+ context 'cache' do
+ let(:user) { create(:admin) }
+ let(:ref) { 'refs/heads/master' }
+ let(:source) { :push }
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:pipeline) { service.execute(source) }
+ let(:job) { pipeline.builds.find_by(name: 'job') }
+ let(:project) { create(:project, :custom_repo, files: files) }
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ context 'with cache:key' do
+ let(:files) { { 'some-file' => '' } }
+
+ let(:config) do
+ <<~EOY
+ job:
+ script:
+ - ls
+ cache:
+ key: 'a-key'
+ paths: ['logs/', 'binaries/']
+ untracked: true
+ EOY
+ end
+
+ it 'uses the provided key' do
+ expected = {
+ 'key' => 'a-key',
+ 'paths' => ['logs/', 'binaries/'],
+ 'policy' => 'pull-push',
+ 'untracked' => true
+ }
+
+ expect(pipeline).to be_persisted
+ expect(job.cache).to match(a_collection_including(expected))
+ end
+ end
+
+ context 'with cache:key:files' do
+ let(:config) do
+ <<~EOY
+ job:
+ script:
+ - ls
+ cache:
+ paths:
+ - logs/
+ key:
+ files:
+ - file.lock
+ - missing-file.lock
+ EOY
+ end
+
+ context 'when file.lock exists' do
+ let(:files) { { 'file.lock' => '' } }
+
+ it 'builds a cache key' do
+ expected = {
+ 'key' => /[a-f0-9]{40}/,
+ 'paths' => ['logs/'],
+ 'policy' => 'pull-push'
+ }
+
+ expect(pipeline).to be_persisted
+ expect(job.cache).to match(a_collection_including(expected))
+ end
+ end
+
+ context 'when file.lock does not exist' do
+ let(:files) { { 'some-file' => '' } }
+
+ it 'uses default cache key' do
+ expected = {
+ 'key' => /default/,
+ 'paths' => ['logs/'],
+ 'policy' => 'pull-push'
+ }
+
+ expect(pipeline).to be_persisted
+ expect(job.cache).to match(a_collection_including(expected))
+ end
+ end
+ end
+
+ context 'with cache:key:files and prefix' do
+ let(:config) do
+ <<~EOY
+ job:
+ script:
+ - ls
+ cache:
+ paths:
+ - logs/
+ key:
+ files:
+ - file.lock
+ prefix: '$ENV_VAR'
+ EOY
+ end
+
+ context 'when file.lock exists' do
+ let(:files) { { 'file.lock' => '' } }
+
+ it 'builds a cache key' do
+ expected = {
+ 'key' => /\$ENV_VAR-[a-f0-9]{40}/,
+ 'paths' => ['logs/'],
+ 'policy' => 'pull-push'
+ }
+
+ expect(pipeline).to be_persisted
+ expect(job.cache).to match(a_collection_including(expected))
+ end
+ end
+
+ context 'when file.lock does not exist' do
+ let(:files) { { 'some-file' => '' } }
+
+ it 'uses default cache key' do
+ expected = {
+ 'key' => /\$ENV_VAR-default/,
+ 'paths' => ['logs/'],
+ 'policy' => 'pull-push'
+ }
+
+ expect(pipeline).to be_persisted
+ expect(job.cache).to match(a_collection_including(expected))
+ end
+ end
+ end
+
+ context 'with too many files' do
+ let(:files) { { 'some-file' => '' } }
+
+ let(:config) do
+ <<~EOY
+ job:
+ script:
+ - ls
+ cache:
+ paths: ['logs/', 'binaries/']
+ untracked: true
+ key:
+ files:
+ - file.lock
+ - other-file.lock
+ - extra-file.lock
+ prefix: 'some-prefix'
+ EOY
+ end
+
+ it 'has errors' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.yaml_errors).to eq("jobs:job:cache:key:files config has too many items (maximum is 2)")
+ expect(job).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index 40a3b115cb5..c922266647b 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -1,16 +1,16 @@
# frozen_string_literal: true
-
require 'spec_helper'
describe Ci::CreatePipelineService do
- context 'rules' do
- let(:user) { create(:admin) }
- let(:ref) { 'refs/heads/master' }
- let(:source) { :push }
- let(:service) { described_class.new(project, user, { ref: ref }) }
- let(:pipeline) { service.execute(source) }
- let(:build_names) { pipeline.builds.pluck(:name) }
+ let(:user) { create(:admin) }
+ let(:ref) { 'refs/heads/master' }
+ let(:source) { :push }
+ let(:project) { create(:project, :repository) }
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:pipeline) { service.execute(source) }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+ context 'job:rules' do
before do
stub_ci_pipeline_yaml_file(config)
allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
@@ -41,6 +41,7 @@ describe Ci::CreatePipelineService do
start_in: 4 hours
EOY
end
+
let(:regular_job) { pipeline.builds.find_by(name: 'regular-job') }
let(:rules_job) { pipeline.builds.find_by(name: 'rules-job') }
let(:delayed_job) { pipeline.builds.find_by(name: 'delayed-job') }
@@ -91,4 +92,259 @@ describe Ci::CreatePipelineService do
end
end
end
+
+ context 'when workflow:rules are used' do
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ context 'with a single regex-matching if: clause' do
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ - if: $CI_COMMIT_REF_NAME =~ /wip$/
+ when: never
+ - if: $CI_COMMIT_REF_NAME =~ /feature/
+
+ regular-job:
+ script: 'echo Hello, World!'
+ EOY
+ end
+
+ context 'matching the first rule in the list' do
+ it 'saves the pipeline' do
+ expect(pipeline).to be_persisted
+ end
+
+ it 'sets the pipeline state to pending' do
+ expect(pipeline).to be_pending
+ end
+ end
+
+ context 'matching the last rule in the list' do
+ let(:ref) { 'refs/heads/feature' }
+
+ it 'saves the pipeline' do
+ expect(pipeline).to be_persisted
+ end
+
+ it 'sets the pipeline state to pending' do
+ expect(pipeline).to be_pending
+ end
+ end
+
+ context 'matching the when:never rule' do
+ let(:ref) { 'refs/heads/wip' }
+
+ it 'does not save the pipeline' do
+ expect(pipeline).not_to be_persisted
+ end
+
+ it 'attaches errors' do
+ expect(pipeline.errors[:base]).to include('Pipeline filtered out by workflow rules.')
+ end
+ end
+
+ context 'matching no rules in the list' do
+ let(:ref) { 'refs/heads/fix' }
+
+ it 'does not save the pipeline' do
+ expect(pipeline).not_to be_persisted
+ end
+
+ it 'attaches errors' do
+ expect(pipeline.errors[:base]).to include('Pipeline filtered out by workflow rules.')
+ end
+ end
+ end
+
+ context 'when root variables are used' do
+ let(:config) do
+ <<-EOY
+ variables:
+ VARIABLE: value
+
+ workflow:
+ rules:
+ - if: $VARIABLE
+
+ regular-job:
+ script: 'echo Hello, World!'
+ EOY
+ end
+
+ context 'matching the first rule in the list' do
+ it 'saves the pipeline' do
+ expect(pipeline).to be_persisted
+ end
+
+ it 'sets the pipeline state to pending' do
+ expect(pipeline).to be_pending
+ end
+ end
+ end
+
+ context 'with a multiple regex-matching if: clause' do
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ - if: $CI_COMMIT_REF_NAME =~ /^feature/ && $CI_COMMIT_REF_NAME =~ /conflict$/
+ when: never
+ - if: $CI_COMMIT_REF_NAME =~ /feature/
+
+ regular-job:
+ script: 'echo Hello, World!'
+ EOY
+ end
+
+ context 'with partial match' do
+ let(:ref) { 'refs/heads/feature' }
+
+ it 'saves the pipeline' do
+ expect(pipeline).to be_persisted
+ end
+
+ it 'sets the pipeline state to pending' do
+ expect(pipeline).to be_pending
+ end
+ end
+
+ context 'with complete match' do
+ let(:ref) { 'refs/heads/feature_conflict' }
+
+ it 'does not save the pipeline' do
+ expect(pipeline).not_to be_persisted
+ end
+
+ it 'attaches errors' do
+ expect(pipeline.errors[:base]).to include('Pipeline filtered out by workflow rules.')
+ end
+ end
+ end
+
+ context 'with job rules' do
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ - if: $CI_COMMIT_REF_NAME =~ /feature/
+
+ regular-job:
+ script: 'echo Hello, World!'
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /wip/
+ - if: $CI_COMMIT_REF_NAME =~ /feature/
+ EOY
+ end
+
+ context 'where workflow passes and the job fails' do
+ let(:ref) { 'refs/heads/master' }
+
+ it 'does not save the pipeline' do
+ expect(pipeline).not_to be_persisted
+ end
+
+ it 'attaches an error about no job in the pipeline' do
+ expect(pipeline.errors[:base]).to include('No stages / jobs for this pipeline.')
+ end
+
+ context 'with workflow:rules shut off' do
+ before do
+ stub_feature_flags(workflow_rules: false)
+ end
+
+ it 'does not save the pipeline' do
+ expect(pipeline).not_to be_persisted
+ end
+
+ it 'attaches an error about no job in the pipeline' do
+ expect(pipeline.errors[:base]).to include('No stages / jobs for this pipeline.')
+ end
+ end
+ end
+
+ context 'where workflow passes and the job passes' do
+ let(:ref) { 'refs/heads/feature' }
+
+ it 'saves the pipeline' do
+ expect(pipeline).to be_persisted
+ end
+
+ it 'sets the pipeline state to pending' do
+ expect(pipeline).to be_pending
+ end
+
+ context 'with workflow:rules shut off' do
+ before do
+ stub_feature_flags(workflow_rules: false)
+ end
+
+ it 'saves the pipeline' do
+ expect(pipeline).to be_persisted
+ end
+
+ it 'sets the pipeline state to pending' do
+ expect(pipeline).to be_pending
+ end
+ end
+ end
+
+ context 'where workflow fails and the job fails' do
+ let(:ref) { 'refs/heads/fix' }
+
+ it 'does not save the pipeline' do
+ expect(pipeline).not_to be_persisted
+ end
+
+ it 'attaches an error about workflow rules' do
+ expect(pipeline.errors[:base]).to include('Pipeline filtered out by workflow rules.')
+ end
+
+ context 'with workflow:rules shut off' do
+ before do
+ stub_feature_flags(workflow_rules: false)
+ end
+
+ it 'does not save the pipeline' do
+ expect(pipeline).not_to be_persisted
+ end
+
+ it 'attaches an error about job rules' do
+ expect(pipeline.errors[:base]).to include('No stages / jobs for this pipeline.')
+ end
+ end
+ end
+
+ context 'where workflow fails and the job passes' do
+ let(:ref) { 'refs/heads/wip' }
+
+ it 'does not save the pipeline' do
+ expect(pipeline).not_to be_persisted
+ end
+
+ it 'attaches an error about workflow rules' do
+ expect(pipeline.errors[:base]).to include('Pipeline filtered out by workflow rules.')
+ end
+
+ context 'with workflow:rules shut off' do
+ before do
+ stub_feature_flags(workflow_rules: false)
+ end
+
+ it 'saves the pipeline' do
+ expect(pipeline).to be_persisted
+ end
+
+ it 'sets the pipeline state to pending' do
+ expect(pipeline).to be_pending
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index fd5f72c4c46..de0f4841215 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -65,6 +65,7 @@ describe Ci::CreatePipelineService do
expect(pipeline.iid).not_to be_nil
expect(pipeline.repository_source?).to be true
expect(pipeline.builds.first).to be_kind_of(Ci::Build)
+ expect(pipeline.yaml_errors).not_to be_present
end
it 'increments the prometheus counter' do
@@ -97,7 +98,7 @@ describe Ci::CreatePipelineService do
end
context 'when the head pipeline sha equals merge request sha' do
- it 'updates head pipeline of each merge request' do
+ it 'updates head pipeline of each merge request', :sidekiq_might_not_need_inline do
merge_request_1
merge_request_2
@@ -140,7 +141,7 @@ describe Ci::CreatePipelineService do
let!(:project) { fork_project(target_project, nil, repository: true) }
let!(:target_project) { create(:project, :repository) }
- it 'updates head pipeline for merge request' do
+ it 'updates head pipeline for merge request', :sidekiq_might_not_need_inline do
merge_request = create(:merge_request, source_branch: 'feature',
target_branch: "master",
source_project: project,
@@ -172,7 +173,7 @@ describe Ci::CreatePipelineService do
stub_ci_pipeline_yaml_file('some invalid syntax')
end
- it 'updates merge request head pipeline reference' do
+ it 'updates merge request head pipeline reference', :sidekiq_might_not_need_inline do
merge_request = create(:merge_request, source_branch: 'master',
target_branch: 'feature',
source_project: project)
@@ -192,7 +193,7 @@ describe Ci::CreatePipelineService do
.and_return('some commit [ci skip]')
end
- it 'updates merge request head pipeline' do
+ it 'updates merge request head pipeline', :sidekiq_might_not_need_inline do
merge_request = create(:merge_request, source_branch: 'master',
target_branch: 'feature',
source_project: project)
@@ -218,21 +219,21 @@ describe Ci::CreatePipelineService do
expect(pipeline.reload).to have_attributes(status: 'pending', auto_canceled_by_id: nil)
end
- it 'auto cancel pending non-HEAD pipelines' do
+ it 'auto cancel pending non-HEAD pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit
pipeline
expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: pipeline.id)
end
- it 'cancels running outdated pipelines' do
+ it 'cancels running outdated pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit.run
head_pipeline = execute_service
expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: head_pipeline.id)
end
- it 'cancel created outdated pipelines' do
+ it 'cancel created outdated pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit.update(status: 'created')
pipeline
@@ -346,7 +347,7 @@ describe Ci::CreatePipelineService do
context 'when only interruptible builds are running' do
context 'when build marked explicitly by interruptible is running' do
- it 'cancels running outdated pipelines' do
+ it 'cancels running outdated pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit
.builds
.find_by_name('build_1_2')
@@ -360,7 +361,7 @@ describe Ci::CreatePipelineService do
end
context 'when build that is not marked as interruptible is running' do
- it 'cancels running outdated pipelines' do
+ it 'cancels running outdated pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit
.builds
.find_by_name('build_2_1')
@@ -376,7 +377,7 @@ describe Ci::CreatePipelineService do
end
context 'when an uninterruptible build is running' do
- it 'does not cancel running outdated pipelines' do
+ it 'does not cancel running outdated pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit
.builds
.find_by_name('build_3_1')
@@ -391,7 +392,7 @@ describe Ci::CreatePipelineService do
end
context 'when an build is waiting on an interruptible scheduled task' do
- it 'cancels running outdated pipelines' do
+ it 'cancels running outdated pipelines', :sidekiq_might_not_need_inline do
allow(Ci::BuildScheduleWorker).to receive(:perform_at)
pipeline_on_previous_commit
@@ -407,7 +408,7 @@ describe Ci::CreatePipelineService do
end
context 'when a uninterruptible build has finished' do
- it 'does not cancel running outdated pipelines' do
+ it 'does not cancel running outdated pipelines', :sidekiq_might_not_need_inline do
pipeline_on_previous_commit
.builds
.find_by_name('build_3_1')
@@ -474,6 +475,66 @@ describe Ci::CreatePipelineService do
end
end
+ context 'config evaluation' do
+ context 'when config is in a file in repository' do
+ before do
+ content = YAML.dump(rspec: { script: 'echo' })
+ stub_ci_pipeline_yaml_file(content)
+ end
+
+ it 'pull it from the repository' do
+ pipeline = execute_service
+ expect(pipeline).to be_repository_source
+ expect(pipeline.builds.map(&:name)).to eq ['rspec']
+ end
+ end
+
+ context 'when config is from Auto-DevOps' do
+ before do
+ stub_ci_pipeline_yaml_file(nil)
+ allow_any_instance_of(Project).to receive(:auto_devops_enabled?).and_return(true)
+ end
+
+ it 'pull it from Auto-DevOps' do
+ pipeline = execute_service
+ expect(pipeline).to be_auto_devops_source
+ expect(pipeline.builds.map(&:name)).to eq %w[test code_quality build]
+ end
+ end
+
+ context 'when config is not found' do
+ before do
+ stub_ci_pipeline_yaml_file(nil)
+ end
+
+ it 'attaches errors to the pipeline' do
+ pipeline = execute_service
+
+ expect(pipeline.errors.full_messages).to eq ['Missing .gitlab-ci.yml file']
+ expect(pipeline).not_to be_persisted
+ end
+ end
+
+ context 'when an unexpected error is raised' do
+ before do
+ expect(Gitlab::Ci::YamlProcessor).to receive(:new)
+ .and_raise(RuntimeError, 'undefined failure')
+ end
+
+ it 'saves error in pipeline' do
+ pipeline = execute_service
+
+ expect(pipeline.yaml_errors).to include('Undefined error')
+ end
+
+ it 'logs error' do
+ expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
+
+ execute_service
+ end
+ end
+ end
+
context 'when yaml is invalid' do
let(:ci_yaml) { 'invalid: file: fiile' }
let(:message) { 'Message' }
@@ -539,6 +600,25 @@ describe Ci::CreatePipelineService do
end
end
+ context 'when an unexpected error is raised' do
+ before do
+ expect(Gitlab::Ci::YamlProcessor).to receive(:new)
+ .and_raise(RuntimeError, 'undefined failure')
+ end
+
+ it 'saves error in pipeline' do
+ pipeline = execute_service
+
+ expect(pipeline.yaml_errors).to include('Undefined error')
+ end
+
+ it 'logs error' do
+ expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
+
+ execute_service
+ end
+ end
+
context 'when commit contains a [ci skip] directive' do
let(:message) { "some message[ci skip]" }
@@ -773,8 +853,8 @@ describe Ci::CreatePipelineService do
it 'correctly creates builds with auto-retry value configured' do
expect(pipeline).to be_persisted
- expect(rspec_job.retries_max).to eq 2
- expect(rspec_job.retry_when).to eq ['always']
+ expect(rspec_job.options_retry_max).to eq 2
+ expect(rspec_job.options_retry_when).to eq ['always']
end
end
@@ -783,8 +863,8 @@ describe Ci::CreatePipelineService do
it 'correctly creates builds with auto-retry value configured' do
expect(pipeline).to be_persisted
- expect(rspec_job.retries_max).to eq 2
- expect(rspec_job.retry_when).to eq ['runner_system_failure']
+ expect(rspec_job.options_retry_max).to eq 2
+ expect(rspec_job.options_retry_when).to eq ['runner_system_failure']
end
end
end
@@ -1236,7 +1316,7 @@ describe Ci::CreatePipelineService do
let!(:project) { fork_project(target_project, nil, repository: true) }
let!(:target_project) { create(:project, :repository) }
- it 'creates a legacy detached merge request pipeline in the forked project' do
+ it 'creates a legacy detached merge request pipeline in the forked project', :sidekiq_might_not_need_inline do
expect(pipeline).to be_persisted
expect(project.ci_pipelines).to eq([pipeline])
expect(target_project.ci_pipelines).to be_empty
diff --git a/spec/services/ci/find_exposed_artifacts_service_spec.rb b/spec/services/ci/find_exposed_artifacts_service_spec.rb
new file mode 100644
index 00000000000..f6309822fe0
--- /dev/null
+++ b/spec/services/ci/find_exposed_artifacts_service_spec.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::FindExposedArtifactsService do
+ include Gitlab::Routing
+
+ let(:metadata) do
+ Gitlab::Ci::Build::Artifacts::Metadata
+ .new(metadata_file_stream, path, { recursive: true })
+ end
+
+ let(:metadata_file_stream) do
+ File.open(Rails.root + 'spec/fixtures/ci_build_artifacts_metadata.gz')
+ end
+
+ let_it_be(:project) { create(:project) }
+ let(:user) { nil }
+
+ after do
+ metadata_file_stream&.close
+ end
+
+ def create_job_with_artifacts(options)
+ create(:ci_build, pipeline: pipeline, options: options).tap do |job|
+ create(:ci_job_artifact, :metadata, job: job)
+ end
+ end
+
+ describe '#for_pipeline' do
+ shared_examples 'finds a single match' do
+ it 'returns the artifact with exact location' do
+ expect(subject).to eq([{
+ text: 'Exposed artifact',
+ url: file_project_job_artifacts_path(project, job, 'other_artifacts_0.1.2/doc_sample.txt'),
+ job_name: job.name,
+ job_path: project_job_path(project, job)
+ }])
+ end
+ end
+
+ shared_examples 'finds multiple matches' do
+ it 'returns the path to the artifacts browser' do
+ expect(subject).to eq([{
+ text: 'Exposed artifact',
+ url: browse_project_job_artifacts_path(project, job),
+ job_name: job.name,
+ job_path: project_job_path(project, job)
+ }])
+ end
+ end
+
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ subject { described_class.new(project, user).for_pipeline(pipeline) }
+
+ context 'with jobs having at most 1 matching exposed artifact' do
+ let!(:job) do
+ create_job_with_artifacts(artifacts: {
+ expose_as: 'Exposed artifact',
+ paths: ['other_artifacts_0.1.2/doc_sample.txt', 'something-else.html']
+ })
+ end
+
+ it_behaves_like 'finds a single match'
+ end
+
+ context 'with jobs having more than 1 matching exposed artifacts' do
+ let!(:job) do
+ create_job_with_artifacts(artifacts: {
+ expose_as: 'Exposed artifact',
+ paths: [
+ 'ci_artifacts.txt',
+ 'other_artifacts_0.1.2/doc_sample.txt',
+ 'something-else.html'
+ ]
+ })
+ end
+
+ it_behaves_like 'finds multiple matches'
+ end
+
+ context 'with jobs having more than 1 matching exposed artifacts inside a directory' do
+ let!(:job) do
+ create_job_with_artifacts(artifacts: {
+ expose_as: 'Exposed artifact',
+ paths: ['tests_encoding/']
+ })
+ end
+
+ it_behaves_like 'finds multiple matches'
+ end
+
+ context 'with jobs having paths with glob expression' do
+ let!(:job) do
+ create_job_with_artifacts(artifacts: {
+ expose_as: 'Exposed artifact',
+ paths: ['other_artifacts_0.1.2/doc_sample.txt', 'tests_encoding/*.*']
+ })
+ end
+
+ it_behaves_like 'finds a single match' # because those with * are ignored
+ end
+
+ context 'limiting results' do
+ let!(:job1) do
+ create_job_with_artifacts(artifacts: {
+ expose_as: 'artifact 1',
+ paths: ['ci_artifacts.txt']
+ })
+ end
+
+ let!(:job2) do
+ create_job_with_artifacts(artifacts: {
+ expose_as: 'artifact 2',
+ paths: ['tests_encoding/']
+ })
+ end
+
+ let!(:job3) do
+ create_job_with_artifacts(artifacts: {
+ expose_as: 'should not be exposed',
+ paths: ['other_artifacts_0.1.2/doc_sample.txt']
+ })
+ end
+
+ subject { described_class.new(project, user).for_pipeline(pipeline, limit: 2) }
+
+ it 'returns first 2 results' do
+ expect(subject).to eq([
+ {
+ text: 'artifact 1',
+ url: file_project_job_artifacts_path(project, job1, 'ci_artifacts.txt'),
+ job_name: job1.name,
+ job_path: project_job_path(project, job1)
+ },
+ {
+ text: 'artifact 2',
+ url: browse_project_job_artifacts_path(project, job2),
+ job_name: job2.name,
+ job_path: project_job_path(project, job2)
+ }
+ ])
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index 05adec8b745..991f8cdfac5 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -27,7 +27,7 @@ describe Ci::ProcessPipelineService, '#execute' do
create_build('deploy', stage_idx: 2)
end
- it 'processes a pipeline' do
+ it 'processes a pipeline', :sidekiq_might_not_need_inline do
expect(process_pipeline).to be_truthy
succeed_pending
@@ -58,7 +58,7 @@ describe Ci::ProcessPipelineService, '#execute' do
create_build('test_job', stage_idx: 1, allow_failure: true)
end
- it 'automatically triggers a next stage when build finishes' do
+ it 'automatically triggers a next stage when build finishes', :sidekiq_might_not_need_inline do
expect(process_pipeline).to be_truthy
expect(builds_statuses).to eq ['pending']
@@ -72,7 +72,7 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
- context 'when optional manual actions are defined' do
+ context 'when optional manual actions are defined', :sidekiq_might_not_need_inline do
before do
create_build('build', stage_idx: 0)
create_build('test', stage_idx: 1)
@@ -241,7 +241,7 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
- context 'when delayed jobs are defined' do
+ context 'when delayed jobs are defined', :sidekiq_might_not_need_inline do
context 'when the scene is timed incremental rollout' do
before do
create_build('build', stage_idx: 0)
@@ -458,7 +458,7 @@ describe Ci::ProcessPipelineService, '#execute' do
process_pipeline
end
- it 'skips second stage and continues on third stage' do
+ it 'skips second stage and continues on third stage', :sidekiq_might_not_need_inline do
expect(all_builds_statuses).to eq(%w[pending created created])
builds.first.success
@@ -502,7 +502,7 @@ describe Ci::ProcessPipelineService, '#execute' do
play_manual_action('deploy')
end
- it 'queues the action and pipeline' do
+ it 'queues the action and pipeline', :sidekiq_might_not_need_inline do
expect(all_builds_statuses).to eq(%w[pending])
expect(pipeline.reload).to be_pending
@@ -510,7 +510,7 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
- context 'when blocking manual actions are defined' do
+ context 'when blocking manual actions are defined', :sidekiq_might_not_need_inline do
before do
create_build('code:test', stage_idx: 0)
create_build('staging:deploy', stage_idx: 1, when: 'manual')
@@ -618,7 +618,7 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
- context 'when second stage has only on_failure jobs' do
+ context 'when second stage has only on_failure jobs', :sidekiq_might_not_need_inline do
before do
create_build('check', stage_idx: 0)
create_build('build', stage_idx: 1, when: 'on_failure')
@@ -636,7 +636,7 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
- context 'when failed build in the middle stage is retried' do
+ context 'when failed build in the middle stage is retried', :sidekiq_might_not_need_inline do
context 'when failed build is the only unsuccessful build in the stage' do
before do
create_build('build:1', stage_idx: 0)
@@ -683,7 +683,7 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
- context 'when builds with auto-retries are configured' do
+ context 'when builds with auto-retries are configured', :sidekiq_might_not_need_inline do
before do
create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 })
create_build('test:1', stage_idx: 1, user: user, when: :on_failure)
@@ -712,7 +712,7 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
- context 'when pipeline with needs is created' do
+ context 'when pipeline with needs is created', :sidekiq_might_not_need_inline do
let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 2f2c525ccc4..04334fb8915 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -502,6 +502,57 @@ module Ci
end
end
+ context 'when build has data integrity problem' do
+ let!(:pending_job) do
+ create(:ci_build, :pending, pipeline: pipeline)
+ end
+
+ before do
+ pending_job.update_columns(options: "string")
+ end
+
+ subject { execute(specific_runner, {}) }
+
+ it 'does drop the build and logs both failures' do
+ expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
+ .with(anything, a_hash_including(extra: a_hash_including(build_id: pending_job.id)))
+ .twice
+ .and_call_original
+
+ expect(subject).to be_nil
+
+ pending_job.reload
+ expect(pending_job).to be_failed
+ expect(pending_job).to be_data_integrity_failure
+ end
+ end
+
+ context 'when build fails to be run!' do
+ let!(:pending_job) do
+ create(:ci_build, :pending, pipeline: pipeline)
+ end
+
+ before do
+ expect_any_instance_of(Ci::Build).to receive(:run!)
+ .and_raise(RuntimeError, 'scheduler error')
+ end
+
+ subject { execute(specific_runner, {}) }
+
+ it 'does drop the build and logs failure' do
+ expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
+ .with(anything, a_hash_including(extra: a_hash_including(build_id: pending_job.id)))
+ .once
+ .and_call_original
+
+ expect(subject).to be_nil
+
+ pending_job.reload
+ expect(pending_job).to be_failed
+ expect(pending_job).to be_scheduler_failure
+ end
+ end
+
context 'when an exception is raised during a persistent ref creation' do
before do
allow_any_instance_of(Ci::PersistentRef).to receive(:exist?) { false }
diff --git a/spec/services/clusters/applications/create_service_spec.rb b/spec/services/clusters/applications/create_service_spec.rb
index 8dd573c3698..bdacb9ce071 100644
--- a/spec/services/clusters/applications/create_service_spec.rb
+++ b/spec/services/clusters/applications/create_service_spec.rb
@@ -132,6 +132,34 @@ describe Clusters::Applications::CreateService do
expect(subject.hostname).to eq('example.com')
end
end
+
+ context 'elastic stack application' do
+ let(:params) do
+ {
+ application: 'elastic_stack',
+ kibana_hostname: 'example.com'
+ }
+ end
+
+ before do
+ create(:clusters_applications_ingress, :installed, external_ip: "127.0.0.0", cluster: cluster)
+ expect_any_instance_of(Clusters::Applications::ElasticStack)
+ .to receive(:make_scheduled!)
+ .and_call_original
+ end
+
+ it 'creates the application' do
+ expect do
+ subject
+
+ cluster.reload
+ end.to change(cluster, :application_elastic_stack)
+ end
+
+ it 'sets the kibana_hostname' do
+ expect(subject.kibana_hostname).to eq('example.com')
+ end
+ end
end
context 'invalid application' do
diff --git a/spec/services/clusters/aws/fetch_credentials_service_spec.rb b/spec/services/clusters/aws/fetch_credentials_service_spec.rb
new file mode 100644
index 00000000000..726d1c30603
--- /dev/null
+++ b/spec/services/clusters/aws/fetch_credentials_service_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Aws::FetchCredentialsService do
+ describe '#execute' do
+ let(:user) { create(:user) }
+ let(:provider) { create(:cluster_provider_aws) }
+
+ let(:gitlab_access_key_id) { 'gitlab-access-key-id' }
+ let(:gitlab_secret_access_key) { 'gitlab-secret-access-key' }
+
+ let(:region) { 'us-east-1' }
+ let(:gitlab_credentials) { Aws::Credentials.new(gitlab_access_key_id, gitlab_secret_access_key) }
+ let(:sts_client) { Aws::STS::Client.new(credentials: gitlab_credentials, region: region) }
+ let(:assumed_role) { instance_double(Aws::AssumeRoleCredentials, credentials: assumed_role_credentials) }
+
+ let(:assumed_role_credentials) { double }
+
+ subject { described_class.new(provision_role, region: region, provider: provider).execute }
+
+ context 'provision role is configured' do
+ let(:provision_role) { create(:aws_role, user: user) }
+
+ before do
+ stub_application_setting(eks_access_key_id: gitlab_access_key_id)
+ stub_application_setting(eks_secret_access_key: gitlab_secret_access_key)
+
+ expect(Aws::Credentials).to receive(:new)
+ .with(gitlab_access_key_id, gitlab_secret_access_key)
+ .and_return(gitlab_credentials)
+
+ expect(Aws::STS::Client).to receive(:new)
+ .with(credentials: gitlab_credentials, region: region)
+ .and_return(sts_client)
+
+ expect(Aws::AssumeRoleCredentials).to receive(:new)
+ .with(
+ client: sts_client,
+ role_arn: provision_role.role_arn,
+ role_session_name: session_name,
+ external_id: provision_role.role_external_id
+ ).and_return(assumed_role)
+ end
+
+ context 'provider is specified' do
+ let(:session_name) { "gitlab-eks-cluster-#{provider.cluster_id}-user-#{user.id}" }
+
+ it { is_expected.to eq assumed_role_credentials }
+ end
+
+ context 'provider is not specifed' do
+ let(:provider) { nil }
+ let(:session_name) { "gitlab-eks-autofill-user-#{user.id}" }
+
+ it { is_expected.to eq assumed_role_credentials }
+ end
+ end
+
+ context 'provision role is not configured' do
+ let(:provision_role) { nil }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::MissingRoleError, 'AWS provisioning role not configured')
+ end
+ end
+ end
+end
diff --git a/spec/services/clusters/aws/finalize_creation_service_spec.rb b/spec/services/clusters/aws/finalize_creation_service_spec.rb
new file mode 100644
index 00000000000..8d7341483e3
--- /dev/null
+++ b/spec/services/clusters/aws/finalize_creation_service_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Aws::FinalizeCreationService do
+ describe '#execute' do
+ let(:provider) { create(:cluster_provider_aws, :creating) }
+ let(:platform) { provider.cluster.platform_kubernetes }
+
+ let(:create_service_account_service) { double(execute: true) }
+ let(:fetch_token_service) { double(execute: gitlab_token) }
+ let(:kube_client) { double(create_config_map: true) }
+ let(:cluster_stack) { double(outputs: [endpoint_output, cert_output, node_role_output]) }
+ let(:node_auth_config_map) { double }
+
+ let(:endpoint_output) { double(output_key: 'ClusterEndpoint', output_value: api_url) }
+ let(:cert_output) { double(output_key: 'ClusterCertificate', output_value: Base64.encode64(ca_pem)) }
+ let(:node_role_output) { double(output_key: 'NodeInstanceRole', output_value: node_role) }
+
+ let(:api_url) { 'https://kubernetes.example.com' }
+ let(:ca_pem) { File.read(Rails.root.join('spec/fixtures/clusters/sample_cert.pem')) }
+ let(:gitlab_token) { 'gitlab-token' }
+ let(:iam_token) { 'iam-token' }
+ let(:node_role) { 'arn::aws::iam::123456789012:role/node-role' }
+
+ subject { described_class.new.execute(provider) }
+
+ before do
+ allow(Clusters::Kubernetes::CreateOrUpdateServiceAccountService).to receive(:gitlab_creator)
+ .with(kube_client, rbac: true)
+ .and_return(create_service_account_service)
+
+ allow(Clusters::Kubernetes::FetchKubernetesTokenService).to receive(:new)
+ .with(
+ kube_client,
+ Clusters::Kubernetes::GITLAB_ADMIN_TOKEN_NAME,
+ Clusters::Kubernetes::GITLAB_SERVICE_ACCOUNT_NAMESPACE)
+ .and_return(fetch_token_service)
+
+ allow(Gitlab::Kubernetes::KubeClient).to receive(:new)
+ .with(
+ api_url,
+ auth_options: { bearer_token: iam_token },
+ ssl_options: {
+ verify_ssl: OpenSSL::SSL::VERIFY_PEER,
+ cert_store: instance_of(OpenSSL::X509::Store)
+ },
+ http_proxy_uri: nil
+ )
+ .and_return(kube_client)
+
+ allow(provider.api_client).to receive(:describe_stacks)
+ .with(stack_name: provider.cluster.name)
+ .and_return(double(stacks: [cluster_stack]))
+
+ allow(Kubeclient::AmazonEksCredentials).to receive(:token)
+ .with(provider.credentials, provider.cluster.name)
+ .and_return(iam_token)
+
+ allow(Gitlab::Kubernetes::ConfigMaps::AwsNodeAuth).to receive(:new)
+ .with(node_role).and_return(double(generate: node_auth_config_map))
+ end
+
+ it 'configures the provider and platform' do
+ subject
+
+ expect(provider).to be_created
+ expect(platform.api_url).to eq(api_url)
+ expect(platform.ca_pem).to eq(ca_pem)
+ expect(platform.token).to eq(gitlab_token)
+ expect(platform).to be_rbac
+ end
+
+ it 'calls the create_service_account_service' do
+ expect(create_service_account_service).to receive(:execute).once
+
+ subject
+ end
+
+ it 'configures cluster node authentication' do
+ expect(kube_client).to receive(:create_config_map).with(node_auth_config_map).once
+
+ subject
+ end
+
+ describe 'error handling' do
+ shared_examples 'provision error' do |message|
+ it "sets the status to :errored with an appropriate error message" do
+ subject
+
+ expect(provider).to be_errored
+ expect(provider.status_reason).to include(message)
+ end
+ end
+
+ context 'failed to request stack details from AWS' do
+ before do
+ allow(provider.api_client).to receive(:describe_stacks)
+ .and_raise(Aws::CloudFormation::Errors::ServiceError.new(double, "Error message"))
+ end
+
+ include_examples 'provision error', 'Failed to fetch CloudFormation stack'
+ end
+
+ context 'failed to create auth config map' do
+ before do
+ allow(kube_client).to receive(:create_config_map)
+ .and_raise(Kubeclient::HttpError.new(500, 'Error', nil))
+ end
+
+ include_examples 'provision error', 'Failed to run Kubeclient'
+ end
+
+ context 'failed to save records' do
+ before do
+ allow(provider.cluster).to receive(:save!)
+ .and_raise(ActiveRecord::RecordInvalid)
+ end
+
+ include_examples 'provision error', 'Failed to configure EKS provider'
+ end
+ end
+ end
+end
diff --git a/spec/services/clusters/aws/provision_service_spec.rb b/spec/services/clusters/aws/provision_service_spec.rb
new file mode 100644
index 00000000000..927ffaef002
--- /dev/null
+++ b/spec/services/clusters/aws/provision_service_spec.rb
@@ -0,0 +1,131 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Aws::ProvisionService do
+ describe '#execute' do
+ let(:provider) { create(:cluster_provider_aws) }
+
+ let(:provision_role) { create(:aws_role, user: provider.created_by_user) }
+ let(:client) { instance_double(Aws::CloudFormation::Client, create_stack: true) }
+ let(:cloudformation_template) { double }
+ let(:credentials) do
+ instance_double(
+ Aws::Credentials,
+ access_key_id: 'key',
+ secret_access_key: 'secret',
+ session_token: 'token'
+ )
+ end
+
+ let(:parameters) do
+ [
+ { parameter_key: 'ClusterName', parameter_value: provider.cluster.name },
+ { parameter_key: 'ClusterRole', parameter_value: provider.role_arn },
+ { parameter_key: 'ClusterControlPlaneSecurityGroup', parameter_value: provider.security_group_id },
+ { parameter_key: 'VpcId', parameter_value: provider.vpc_id },
+ { parameter_key: 'Subnets', parameter_value: provider.subnet_ids.join(',') },
+ { parameter_key: 'NodeAutoScalingGroupDesiredCapacity', parameter_value: provider.num_nodes.to_s },
+ { parameter_key: 'NodeInstanceType', parameter_value: provider.instance_type },
+ { parameter_key: 'KeyName', parameter_value: provider.key_name }
+ ]
+ end
+
+ subject { described_class.new.execute(provider) }
+
+ before do
+ allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
+ .with(provision_role, provider: provider, region: provider.region)
+ .and_return(double(execute: credentials))
+
+ allow(provider).to receive(:api_client)
+ .and_return(client)
+
+ allow(File).to receive(:read)
+ .with(Rails.root.join('vendor', 'aws', 'cloudformation', 'eks_cluster.yaml'))
+ .and_return(cloudformation_template)
+ end
+
+ it 'updates the provider status to :creating and configures the provider with credentials' do
+ subject
+
+ expect(provider).to be_creating
+ expect(provider.access_key_id).to eq 'key'
+ expect(provider.secret_access_key).to eq 'secret'
+ expect(provider.session_token).to eq 'token'
+ end
+
+ it 'creates a CloudFormation stack' do
+ expect(client).to receive(:create_stack).with(
+ stack_name: provider.cluster.name,
+ template_body: cloudformation_template,
+ parameters: parameters,
+ capabilities: ["CAPABILITY_IAM"]
+ )
+
+ subject
+ end
+
+ it 'schedules a worker to monitor creation status' do
+ expect(WaitForClusterCreationWorker).to receive(:perform_in)
+ .with(Clusters::Aws::VerifyProvisionStatusService::INITIAL_INTERVAL, provider.cluster_id)
+
+ subject
+ end
+
+ describe 'error handling' do
+ shared_examples 'provision error' do |message|
+ it "sets the status to :errored with an appropriate error message" do
+ subject
+
+ expect(provider).to be_errored
+ expect(provider.status_reason).to include(message)
+ end
+ end
+
+ context 'invalid state transition' do
+ before do
+ allow(provider).to receive(:make_creating).and_return(false)
+ end
+
+ include_examples 'provision error', 'Failed to update provider record'
+ end
+
+ context 'AWS role is not configured' do
+ before do
+ allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
+ .and_raise(Clusters::Aws::FetchCredentialsService::MissingRoleError)
+ end
+
+ include_examples 'provision error', 'Amazon role is not configured'
+ end
+
+ context 'AWS credentials are not configured' do
+ before do
+ allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
+ .and_raise(Aws::Errors::MissingCredentialsError)
+ end
+
+ include_examples 'provision error', 'Amazon credentials are not configured'
+ end
+
+ context 'Authentication failure' do
+ before do
+ allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
+ .and_raise(Aws::STS::Errors::ServiceError.new(double, 'Error message'))
+ end
+
+ include_examples 'provision error', 'Amazon authentication failed'
+ end
+
+ context 'CloudFormation failure' do
+ before do
+ allow(client).to receive(:create_stack)
+ .and_raise(Aws::CloudFormation::Errors::ServiceError.new(double, 'Error message'))
+ end
+
+ include_examples 'provision error', 'Amazon CloudFormation request failed'
+ end
+ end
+ end
+end
diff --git a/spec/services/clusters/aws/proxy_service_spec.rb b/spec/services/clusters/aws/proxy_service_spec.rb
new file mode 100644
index 00000000000..7b0e0512b95
--- /dev/null
+++ b/spec/services/clusters/aws/proxy_service_spec.rb
@@ -0,0 +1,210 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Aws::ProxyService do
+ let(:role) { create(:aws_role) }
+ let(:credentials) { instance_double(Aws::Credentials) }
+ let(:client_instance) { instance_double(client) }
+
+ let(:region) { 'region' }
+ let(:vpc_id) { }
+ let(:params) do
+ ActionController::Parameters.new({
+ resource: resource,
+ region: region,
+ vpc_id: vpc_id
+ })
+ end
+
+ subject { described_class.new(role, params: params).execute }
+
+ context 'external resources' do
+ before do
+ allow(Clusters::Aws::FetchCredentialsService).to receive(:new) do
+ double(execute: credentials)
+ end
+
+ allow(client).to receive(:new)
+ .with(
+ credentials: credentials, region: region,
+ http_open_timeout: 5, http_read_timeout: 10)
+ .and_return(client_instance)
+ end
+
+ shared_examples 'bad request' do
+ it 'returns an empty hash' do
+ expect(subject.status).to eq :bad_request
+ expect(subject.body).to eq({})
+ end
+ end
+
+ describe 'key_pairs' do
+ let(:client) { Aws::EC2::Client }
+ let(:resource) { 'key_pairs' }
+ let(:response) { double(to_hash: :key_pairs) }
+
+ it 'requests a list of key pairs' do
+ expect(client_instance).to receive(:describe_key_pairs).once.and_return(response)
+ expect(subject.status).to eq :ok
+ expect(subject.body).to eq :key_pairs
+ end
+ end
+
+ describe 'roles' do
+ let(:client) { Aws::IAM::Client }
+ let(:resource) { 'roles' }
+ let(:response) { double(to_hash: :roles) }
+
+ it 'requests a list of roles' do
+ expect(client_instance).to receive(:list_roles).once.and_return(response)
+ expect(subject.status).to eq :ok
+ expect(subject.body).to eq :roles
+ end
+ end
+
+ describe 'regions' do
+ let(:client) { Aws::EC2::Client }
+ let(:resource) { 'regions' }
+ let(:response) { double(to_hash: :regions) }
+
+ it 'requests a list of regions' do
+ expect(client_instance).to receive(:describe_regions).once.and_return(response)
+ expect(subject.status).to eq :ok
+ expect(subject.body).to eq :regions
+ end
+ end
+
+ describe 'security_groups' do
+ let(:client) { Aws::EC2::Client }
+ let(:resource) { 'security_groups' }
+ let(:response) { double(to_hash: :security_groups) }
+
+ include_examples 'bad request'
+
+ context 'VPC is specified' do
+ let(:vpc_id) { 'vpc-1' }
+
+ it 'requests a list of security groups for a VPC' do
+ expect(client_instance).to receive(:describe_security_groups).once
+ .with(filters: [{ name: 'vpc-id', values: [vpc_id] }])
+ .and_return(response)
+ expect(subject.status).to eq :ok
+ expect(subject.body).to eq :security_groups
+ end
+ end
+ end
+
+ describe 'subnets' do
+ let(:client) { Aws::EC2::Client }
+ let(:resource) { 'subnets' }
+ let(:response) { double(to_hash: :subnets) }
+
+ include_examples 'bad request'
+
+ context 'VPC is specified' do
+ let(:vpc_id) { 'vpc-1' }
+
+ it 'requests a list of subnets for a VPC' do
+ expect(client_instance).to receive(:describe_subnets).once
+ .with(filters: [{ name: 'vpc-id', values: [vpc_id] }])
+ .and_return(response)
+ expect(subject.status).to eq :ok
+ expect(subject.body).to eq :subnets
+ end
+ end
+ end
+
+ describe 'vpcs' do
+ let(:client) { Aws::EC2::Client }
+ let(:resource) { 'vpcs' }
+ let(:response) { double(to_hash: :vpcs) }
+
+ it 'requests a list of VPCs' do
+ expect(client_instance).to receive(:describe_vpcs).once.and_return(response)
+ expect(subject.status).to eq :ok
+ expect(subject.body).to eq :vpcs
+ end
+ end
+
+ context 'errors' do
+ let(:client) { Aws::EC2::Client }
+
+ context 'unknown resource' do
+ let(:resource) { 'instances' }
+
+ include_examples 'bad request'
+ end
+
+ context 'client and configuration errors' do
+ let(:resource) { 'vpcs' }
+
+ before do
+ allow(client_instance).to receive(:describe_vpcs).and_raise(error)
+ end
+
+ context 'error fetching credentials' do
+ let(:error) { Aws::STS::Errors::ServiceError.new(nil, 'error message') }
+
+ include_examples 'bad request'
+ end
+
+ context 'credentials not configured' do
+ let(:error) { Aws::Errors::MissingCredentialsError.new('error message') }
+
+ include_examples 'bad request'
+ end
+
+ context 'role not configured' do
+ let(:error) { Clusters::Aws::FetchCredentialsService::MissingRoleError.new('error message') }
+
+ include_examples 'bad request'
+ end
+
+ context 'EC2 error' do
+ let(:error) { Aws::EC2::Errors::ServiceError.new(nil, 'error message') }
+
+ include_examples 'bad request'
+ end
+
+ context 'IAM error' do
+ let(:error) { Aws::IAM::Errors::ServiceError.new(nil, 'error message') }
+
+ include_examples 'bad request'
+ end
+
+ context 'STS error' do
+ let(:error) { Aws::STS::Errors::ServiceError.new(nil, 'error message') }
+
+ include_examples 'bad request'
+ end
+ end
+ end
+ end
+
+ context 'local resources' do
+ describe 'instance_types' do
+ let(:resource) { 'instance_types' }
+ let(:cloudformation_template) { double }
+ let(:instance_types) { double(dig: %w(t3.small)) }
+
+ before do
+ allow(File).to receive(:read)
+ .with(Rails.root.join('vendor', 'aws', 'cloudformation', 'eks_cluster.yaml'))
+ .and_return(cloudformation_template)
+
+ allow(YAML).to receive(:safe_load)
+ .with(cloudformation_template)
+ .and_return(instance_types)
+ end
+
+ it 'returns a list of instance types' do
+ expect(subject.status).to eq :ok
+ expect(subject.body).to have_key(:instance_types)
+ expect(subject.body[:instance_types]).to match_array([
+ instance_type_name: 't3.small'
+ ])
+ end
+ end
+ end
+end
diff --git a/spec/services/clusters/aws/verify_provision_status_service_spec.rb b/spec/services/clusters/aws/verify_provision_status_service_spec.rb
new file mode 100644
index 00000000000..b62b0875bf3
--- /dev/null
+++ b/spec/services/clusters/aws/verify_provision_status_service_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Aws::VerifyProvisionStatusService do
+ describe '#execute' do
+ let(:provider) { create(:cluster_provider_aws) }
+
+ let(:stack) { double(stack_status: stack_status, creation_time: creation_time) }
+ let(:creation_time) { 1.minute.ago }
+
+ subject { described_class.new.execute(provider) }
+
+ before do
+ allow(provider.api_client).to receive(:describe_stacks)
+ .with(stack_name: provider.cluster.name)
+ .and_return(double(stacks: [stack]))
+ end
+
+ shared_examples 'provision error' do |message|
+ it "sets the status to :errored with an appropriate error message" do
+ subject
+
+ expect(provider).to be_errored
+ expect(provider.status_reason).to include(message)
+ end
+ end
+
+ context 'stack creation is still in progress' do
+ let(:stack_status) { 'CREATE_IN_PROGRESS' }
+ let(:verify_service) { double(execute: true) }
+
+ it 'schedules a worker to check again later' do
+ expect(WaitForClusterCreationWorker).to receive(:perform_in)
+ .with(described_class::POLL_INTERVAL, provider.cluster_id)
+
+ subject
+ end
+
+ context 'stack creation is taking too long' do
+ let(:creation_time) { 1.hour.ago }
+
+ include_examples 'provision error', 'Kubernetes cluster creation time exceeds timeout'
+ end
+ end
+
+ context 'stack creation is complete' do
+ let(:stack_status) { 'CREATE_COMPLETE' }
+ let(:finalize_service) { double(execute: true) }
+
+ it 'finalizes creation' do
+ expect(Clusters::Aws::FinalizeCreationService).to receive(:new).and_return(finalize_service)
+ expect(finalize_service).to receive(:execute).with(provider).once
+
+ subject
+ end
+ end
+
+ context 'stack creation failed' do
+ let(:stack_status) { 'CREATE_FAILED' }
+
+ include_examples 'provision error', 'Unexpected status'
+ end
+
+ context 'error communicating with CloudFormation API' do
+ let(:stack_status) { 'CREATE_IN_PROGRESS' }
+
+ before do
+ allow(provider.api_client).to receive(:describe_stacks)
+ .and_raise(Aws::CloudFormation::Errors::ServiceError.new(double, 'Error message'))
+ end
+
+ include_examples 'provision error', 'Amazon CloudFormation request failed'
+ end
+ end
+end
diff --git a/spec/services/clusters/destroy_service_spec.rb b/spec/services/clusters/destroy_service_spec.rb
new file mode 100644
index 00000000000..c0fcc971500
--- /dev/null
+++ b/spec/services/clusters/destroy_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::DestroyService do
+ describe '#execute' do
+ subject { described_class.new(cluster.user, params).execute(cluster) }
+
+ let!(:cluster) { create(:cluster, :project, :provided_by_user) }
+
+ context 'when correct params' do
+ shared_examples 'only removes cluster' do
+ it 'does not start cleanup' do
+ expect(cluster).not_to receive(:start_cleanup)
+ subject
+ end
+
+ it 'destroys the cluster' do
+ subject
+ expect { cluster.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
+ end
+
+ context 'when params are empty' do
+ let(:params) { {} }
+
+ it_behaves_like 'only removes cluster'
+ end
+
+ context 'when cleanup param is false' do
+ let(:params) { { cleanup: 'false' } }
+
+ it_behaves_like 'only removes cluster'
+ end
+
+ context 'when cleanup param is true' do
+ let(:params) { { cleanup: 'true' } }
+
+ before do
+ allow(Clusters::Cleanup::AppWorker).to receive(:perform_async)
+ end
+
+ it 'does not destroy cluster' do
+ subject
+ expect(Clusters::Cluster.where(id: cluster.id).exists?).not_to be_falsey
+ end
+
+ it 'transitions cluster#cleanup_status from cleanup_not_started to uninstalling_applications' do
+ expect { subject }.to change { cluster.cleanup_status_name }
+ .from(:cleanup_not_started)
+ .to(:cleanup_uninstalling_applications)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
index 5a3b1cd6cfb..291e63bbe4a 100644
--- a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
@@ -37,6 +37,8 @@ describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute' do
stub_kubeclient_put_secret(api_url, "#{namespace}-token", namespace: namespace)
stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME, namespace: namespace)
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace)
+ stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME, namespace: namespace)
+ stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME, namespace: namespace)
stub_kubeclient_get_secret(
api_url,
diff --git a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
index 10dbfc800ff..4df73fcc2ae 100644
--- a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
@@ -145,6 +145,8 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
stub_kubeclient_create_role_binding(api_url, namespace: namespace)
stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME, namespace: namespace)
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace)
+ stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME, namespace: namespace)
+ stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME, namespace: namespace)
end
it_behaves_like 'creates service account and token'
@@ -172,6 +174,31 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
)
end
+ it 'creates a role binding granting crossplane database permissions to the service account' do
+ subject
+
+ expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME}").with(
+ body: hash_including(
+ metadata: {
+ name: Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME,
+ namespace: namespace
+ },
+ roleRef: {
+ apiGroup: 'rbac.authorization.k8s.io',
+ kind: 'Role',
+ name: Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME
+ },
+ subjects: [
+ {
+ kind: 'ServiceAccount',
+ name: service_account_name,
+ namespace: namespace
+ }
+ ]
+ )
+ )
+ end
+
it 'creates a role and role binding granting knative serving permissions to the service account' do
subject
@@ -189,6 +216,24 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
)
)
end
+
+ it 'creates a role and role binding granting crossplane database permissions to the service account' do
+ subject
+
+ expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/roles/#{Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME}").with(
+ body: hash_including(
+ metadata: {
+ name: Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME,
+ namespace: namespace
+ },
+ rules: [{
+ apiGroups: %w(database.crossplane.io),
+ resources: %w(postgresqlinstances),
+ verbs: %w(get list create watch)
+ }]
+ )
+ )
+ end
end
end
end
diff --git a/spec/services/clusters/update_service_spec.rb b/spec/services/clusters/update_service_spec.rb
index 3ee45375dca..fdbed4fa5d8 100644
--- a/spec/services/clusters/update_service_spec.rb
+++ b/spec/services/clusters/update_service_spec.rb
@@ -90,5 +90,132 @@ describe Clusters::UpdateService do
end
end
end
+
+ context 'when params includes :management_project_id' do
+ context 'management_project is non-existent' do
+ let(:params) do
+ { management_project_id: 0 }
+ end
+
+ it 'does not update management_project_id' do
+ is_expected.to eq(false)
+
+ expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
+
+ cluster.reload
+ expect(cluster.management_project_id).to be_nil
+ end
+ end
+
+ shared_examples 'setting a management project' do
+ context 'user is authorized to administer management_project' do
+ before do
+ management_project.add_maintainer(cluster.user)
+ end
+
+ let(:params) do
+ { management_project_id: management_project.id }
+ end
+
+ it 'updates management_project_id' do
+ is_expected.to eq(true)
+
+ expect(cluster.management_project).to eq(management_project)
+ end
+ end
+
+ context 'user is not authorized to administer management_project' do
+ let(:params) do
+ { management_project_id: management_project.id }
+ end
+
+ it 'does not update management_project_id' do
+ is_expected.to eq(false)
+
+ expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
+
+ cluster.reload
+ expect(cluster.management_project_id).to be_nil
+ end
+ end
+
+ context 'cluster already has a management project set' do
+ before do
+ cluster.update!(management_project: create(:project))
+ end
+
+ let(:params) do
+ { management_project_id: '' }
+ end
+
+ it 'unsets management_project_id' do
+ is_expected.to eq(true)
+
+ cluster.reload
+ expect(cluster.management_project_id).to be_nil
+ end
+ end
+ end
+
+ context 'project cluster' do
+ include_examples 'setting a management project' do
+ let(:management_project) { create(:project, namespace: cluster.first_project.namespace) }
+ end
+
+ context 'management_project is outside of the namespace scope' do
+ before do
+ management_project.update(group: create(:group))
+ end
+
+ let(:params) do
+ { management_project_id: management_project.id }
+ end
+
+ it 'does not update management_project_id' do
+ is_expected.to eq(false)
+
+ expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
+
+ cluster.reload
+ expect(cluster.management_project_id).to be_nil
+ end
+ end
+ end
+
+ context 'group cluster' do
+ let(:cluster) { create(:cluster, :group) }
+
+ include_examples 'setting a management project' do
+ let(:management_project) { create(:project, group: cluster.first_group) }
+ end
+
+ context 'management_project is outside of the namespace scope' do
+ before do
+ management_project.update(group: create(:group))
+ end
+
+ let(:params) do
+ { management_project_id: management_project.id }
+ end
+
+ it 'does not update management_project_id' do
+ is_expected.to eq(false)
+
+ expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
+
+ cluster.reload
+ expect(cluster.management_project_id).to be_nil
+ end
+ end
+ end
+
+ context 'instance cluster' do
+ let(:cluster) { create(:cluster, :instance) }
+
+ include_examples 'setting a management project' do
+ let(:management_project) { create(:project) }
+ end
+ end
+ end
end
end
diff --git a/spec/services/concerns/merge_requests/assigns_merge_params_spec.rb b/spec/services/concerns/merge_requests/assigns_merge_params_spec.rb
index 5b653aa331c..9cf7f354191 100644
--- a/spec/services/concerns/merge_requests/assigns_merge_params_spec.rb
+++ b/spec/services/concerns/merge_requests/assigns_merge_params_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe MergeRequests::AssignsMergeParams do
diff --git a/spec/services/create_branch_service_spec.rb b/spec/services/create_branch_service_spec.rb
index 0d34c7f9a82..9661173c9e7 100644
--- a/spec/services/create_branch_service_spec.rb
+++ b/spec/services/create_branch_service_spec.rb
@@ -22,5 +22,20 @@ describe CreateBranchService do
expect(project.repository.branch_exists?('my-feature')).to be_truthy
end
end
+
+ context 'when creating a branch fails' do
+ let(:project) { create(:project_empty_repo) }
+
+ before do
+ allow(project.repository).to receive(:add_branch).and_return(false)
+ end
+
+ it 'returns an error with the branch name' do
+ result = service.execute('my-feature', 'master')
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq("Invalid reference name: my-feature")
+ end
+ end
end
end
diff --git a/spec/services/deployments/after_create_service_spec.rb b/spec/services/deployments/after_create_service_spec.rb
index b34483ea85b..94532ed81ae 100644
--- a/spec/services/deployments/after_create_service_spec.rb
+++ b/spec/services/deployments/after_create_service_spec.rb
@@ -53,6 +53,14 @@ describe Deployments::AfterCreateService do
service.execute
end
+ it 'links merge requests to deployment' do
+ expect_next_instance_of(Deployments::LinkMergeRequestsService, deployment) do |link_mr_service|
+ expect(link_mr_service).to receive(:execute)
+ end
+
+ service.execute
+ end
+
it 'returns the deployment' do
expect(subject.execute).to eq(deployment)
end
@@ -237,4 +245,30 @@ describe Deployments::AfterCreateService do
end
end
end
+
+ describe '#update_environment' do
+ it 'links the merge requests' do
+ double = instance_double(Deployments::LinkMergeRequestsService)
+
+ allow(Deployments::LinkMergeRequestsService)
+ .to receive(:new)
+ .with(deployment)
+ .and_return(double)
+
+ expect(double).to receive(:execute)
+
+ service.update_environment(deployment)
+ end
+
+ context 'when the tracking of merge requests is disabled' do
+ it 'does nothing' do
+ stub_feature_flags(deployment_merge_requests: false)
+
+ expect(Deployments::LinkMergeRequestsService)
+ .not_to receive(:new)
+
+ service.update_environment(deployment)
+ end
+ end
+ end
end
diff --git a/spec/services/deployments/link_merge_requests_service_spec.rb b/spec/services/deployments/link_merge_requests_service_spec.rb
new file mode 100644
index 00000000000..ba069658dfd
--- /dev/null
+++ b/spec/services/deployments/link_merge_requests_service_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Deployments::LinkMergeRequestsService do
+ describe '#execute' do
+ context 'when the deployment did not succeed' do
+ it 'does nothing' do
+ deploy = create(:deployment, :failed)
+
+ expect(deploy).not_to receive(:link_merge_requests)
+
+ described_class.new(deploy).execute
+ end
+ end
+
+ context 'when there is a previous deployment' do
+ it 'links all merge requests merged since the previous deployment' do
+ deploy1 = create(:deployment, :success, sha: 'foo')
+ deploy2 = create(
+ :deployment,
+ :success,
+ sha: 'bar',
+ project: deploy1.project,
+ environment: deploy1.environment
+ )
+
+ service = described_class.new(deploy2)
+
+ expect(service)
+ .to receive(:link_merge_requests_for_range)
+ .with('foo', 'bar')
+
+ service.execute
+ end
+ end
+
+ context 'when there are no previous deployments' do
+ it 'links all merged merge requests' do
+ deploy = create(:deployment, :success)
+ service = described_class.new(deploy)
+
+ expect(service).to receive(:link_all_merged_merge_requests)
+
+ service.execute
+ end
+ end
+ end
+
+ describe '#link_merge_requests_for_range' do
+ it 'links merge requests' do
+ project = create(:project, :repository)
+ environment = create(:environment, project: project)
+ deploy =
+ create(:deployment, :success, project: project, environment: environment)
+
+ mr1 = create(
+ :merge_request,
+ :merged,
+ merge_commit_sha: '1e292f8fedd741b75372e19097c76d327140c312',
+ source_project: project,
+ target_project: project
+ )
+
+ mr2 = create(
+ :merge_request,
+ :merged,
+ merge_commit_sha: '2d1db523e11e777e49377cfb22d368deec3f0793',
+ source_project: project,
+ target_project: project
+ )
+
+ described_class.new(deploy).link_merge_requests_for_range(
+ '7975be0116940bf2ad4321f79d02a55c5f7779aa',
+ 'ddd0f15ae83993f5cb66a927a28673882e99100b'
+ )
+
+ expect(deploy.merge_requests).to include(mr1, mr2)
+ end
+ end
+
+ describe '#link_all_merged_merge_requests' do
+ it 'links all merged merge requests targeting the deployed branch' do
+ project = create(:project, :repository)
+ environment = create(:environment, project: project)
+ deploy =
+ create(:deployment, :success, project: project, environment: environment)
+
+ mr1 = create(
+ :merge_request,
+ :merged,
+ source_project: project,
+ target_project: project,
+ source_branch: 'source1',
+ target_branch: deploy.ref
+ )
+
+ mr2 = create(
+ :merge_request,
+ :merged,
+ source_project: project,
+ target_project: project,
+ source_branch: 'source2',
+ target_branch: deploy.ref
+ )
+
+ mr3 = create(
+ :merge_request,
+ :merged,
+ source_project: project,
+ target_project: project,
+ target_branch: 'foo'
+ )
+
+ described_class.new(deploy).link_all_merged_merge_requests
+
+ expect(deploy.merge_requests).to include(mr1, mr2)
+ expect(deploy.merge_requests).not_to include(mr3)
+ end
+ end
+end
diff --git a/spec/services/deployments/update_service_spec.rb b/spec/services/deployments/update_service_spec.rb
index a923099b82c..8a918d28ffd 100644
--- a/spec/services/deployments/update_service_spec.rb
+++ b/spec/services/deployments/update_service_spec.rb
@@ -3,13 +3,55 @@
require 'spec_helper'
describe Deployments::UpdateService do
- let(:deploy) { create(:deployment, :running) }
- let(:service) { described_class.new(deploy, status: 'success') }
+ let(:deploy) { create(:deployment) }
describe '#execute' do
- it 'updates the status of a deployment' do
- expect(service.execute).to eq(true)
- expect(deploy.status).to eq('success')
+ it 'can update the status to running' do
+ expect(described_class.new(deploy, status: 'running').execute)
+ .to be_truthy
+
+ expect(deploy).to be_running
+ end
+
+ it 'can update the status to success' do
+ expect(described_class.new(deploy, status: 'success').execute)
+ .to be_truthy
+
+ expect(deploy).to be_success
+ end
+
+ it 'can update the status to failed' do
+ expect(described_class.new(deploy, status: 'failed').execute)
+ .to be_truthy
+
+ expect(deploy).to be_failed
+ end
+
+ it 'can update the status to canceled' do
+ expect(described_class.new(deploy, status: 'canceled').execute)
+ .to be_truthy
+
+ expect(deploy).to be_canceled
+ end
+
+ it 'returns false when the status is not supported' do
+ expect(described_class.new(deploy, status: 'kittens').execute)
+ .to be_falsey
+ end
+
+ it 'links merge requests when changing the status to success', :sidekiq_inline do
+ mr = create(
+ :merge_request,
+ :merged,
+ target_project: deploy.project,
+ source_project: deploy.project,
+ target_branch: 'master',
+ source_branch: 'foo'
+ )
+
+ described_class.new(deploy, status: 'success').execute
+
+ expect(deploy.merge_requests).to eq([mr])
end
end
end
diff --git a/spec/services/error_tracking/issue_details_service_spec.rb b/spec/services/error_tracking/issue_details_service_spec.rb
new file mode 100644
index 00000000000..4d5505bb5a9
--- /dev/null
+++ b/spec/services/error_tracking/issue_details_service_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ErrorTracking::IssueDetailsService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
+ let(:token) { 'test-token' }
+ let(:result) { subject.execute }
+
+ let(:error_tracking_setting) do
+ create(:project_error_tracking_setting, api_url: sentry_url, token: token, project: project)
+ end
+
+ subject { described_class.new(project, user) }
+
+ before do
+ expect(project).to receive(:error_tracking_setting).at_least(:once).and_return(error_tracking_setting)
+
+ project.add_reporter(user)
+ end
+
+ describe '#execute' do
+ context 'with authorized user' do
+ context 'when issue_details returns a detailed error' do
+ let(:detailed_error) { build(:detailed_error_tracking_error) }
+
+ before do
+ expect(error_tracking_setting)
+ .to receive(:issue_details).and_return(issue: detailed_error)
+ end
+
+ it 'returns the detailed error' do
+ expect(result).to eq(status: :success, issue: detailed_error)
+ end
+ end
+
+ include_examples 'error tracking service data not ready', :issue_details
+ include_examples 'error tracking service sentry error handling', :issue_details
+ include_examples 'error tracking service http status handling', :issue_details
+ end
+
+ include_examples 'error tracking service unauthorized user'
+ include_examples 'error tracking service disabled'
+ end
+end
diff --git a/spec/services/error_tracking/issue_latest_event_service_spec.rb b/spec/services/error_tracking/issue_latest_event_service_spec.rb
new file mode 100644
index 00000000000..cda15042814
--- /dev/null
+++ b/spec/services/error_tracking/issue_latest_event_service_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ErrorTracking::IssueLatestEventService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
+ let(:token) { 'test-token' }
+ let(:result) { subject.execute }
+
+ let(:error_tracking_setting) do
+ create(:project_error_tracking_setting, api_url: sentry_url, token: token, project: project)
+ end
+
+ subject { described_class.new(project, user) }
+
+ before do
+ expect(project).to receive(:error_tracking_setting).at_least(:once).and_return(error_tracking_setting)
+
+ project.add_reporter(user)
+ end
+
+ describe '#execute' do
+ context 'with authorized user' do
+ context 'when issue_latest_event returns an error event' do
+ let(:error_event) { build(:error_tracking_error_event) }
+
+ before do
+ expect(error_tracking_setting)
+ .to receive(:issue_latest_event).and_return(latest_event: error_event)
+ end
+
+ it 'returns the error event' do
+ expect(result).to eq(status: :success, latest_event: error_event)
+ end
+ end
+
+ include_examples 'error tracking service data not ready', :issue_latest_event
+ include_examples 'error tracking service sentry error handling', :issue_latest_event
+ include_examples 'error tracking service http status handling', :issue_latest_event
+ end
+
+ include_examples 'error tracking service unauthorized user'
+ include_examples 'error tracking service disabled'
+ end
+end
diff --git a/spec/services/error_tracking/list_issues_service_spec.rb b/spec/services/error_tracking/list_issues_service_spec.rb
index 3a8f3069911..5b73bc91478 100644
--- a/spec/services/error_tracking/list_issues_service_spec.rb
+++ b/spec/services/error_tracking/list_issues_service_spec.rb
@@ -37,93 +37,20 @@ describe ErrorTracking::ListIssuesService do
end
end
- context 'when list_sentry_issues returns nil' do
- before do
- expect(error_tracking_setting)
- .to receive(:list_sentry_issues).and_return(nil)
- end
-
- it 'result is not ready' do
- expect(result).to eq(
- status: :error, http_status: :no_content, message: 'Not ready. Try again later')
- end
- end
-
- context 'when list_sentry_issues returns error' do
- before do
- allow(error_tracking_setting)
- .to receive(:list_sentry_issues)
- .and_return(
- error: 'Sentry response status code: 401',
- error_type: ErrorTracking::ProjectErrorTrackingSetting::SENTRY_API_ERROR_TYPE_NON_20X_RESPONSE
- )
- end
-
- it 'returns the error' do
- expect(result).to eq(
- status: :error,
- http_status: :bad_request,
- message: 'Sentry response status code: 401'
- )
- end
- end
-
- context 'when list_sentry_issues returns error with http_status' do
- before do
- allow(error_tracking_setting)
- .to receive(:list_sentry_issues)
- .and_return(
- error: 'Sentry API response is missing keys. key not found: "id"',
- error_type: ErrorTracking::ProjectErrorTrackingSetting::SENTRY_API_ERROR_TYPE_MISSING_KEYS
- )
- end
-
- it 'returns the error with correct http_status' do
- expect(result).to eq(
- status: :error,
- http_status: :internal_server_error,
- message: 'Sentry API response is missing keys. key not found: "id"'
- )
- end
- end
+ include_examples 'error tracking service data not ready', :list_sentry_issues
+ include_examples 'error tracking service sentry error handling', :list_sentry_issues
+ include_examples 'error tracking service http status handling', :list_sentry_issues
end
- context 'with unauthorized user' do
- let(:unauthorized_user) { create(:user) }
-
- subject { described_class.new(project, unauthorized_user) }
-
- it 'returns error' do
- result = subject.execute
-
- expect(result).to include(
- status: :error,
- message: 'Access denied',
- http_status: :unauthorized
- )
- end
- end
-
- context 'with error tracking disabled' do
- before do
- error_tracking_setting.enabled = false
- end
-
- it 'raises error' do
- result = subject.execute
-
- expect(result).to include(status: :error, message: 'Error Tracking is not enabled')
- end
- end
+ include_examples 'error tracking service unauthorized user'
+ include_examples 'error tracking service disabled'
end
- describe '#sentry_external_url' do
- let(:external_url) { 'https://sentrytest.gitlab.com/sentry-org/sentry-project' }
-
- it 'calls ErrorTracking::ProjectErrorTrackingSetting' do
- expect(error_tracking_setting).to receive(:sentry_external_url).and_call_original
+ describe '#external_url' do
+ it 'calls the project setting sentry_external_url' do
+ expect(error_tracking_setting).to receive(:sentry_external_url).and_return(sentry_url)
- subject.external_url
+ expect(subject.external_url).to eql sentry_url
end
end
end
diff --git a/spec/services/error_tracking/list_projects_service_spec.rb b/spec/services/error_tracking/list_projects_service_spec.rb
index a272a604184..cd4b835e097 100644
--- a/spec/services/error_tracking/list_projects_service_spec.rb
+++ b/spec/services/error_tracking/list_projects_service_spec.rb
@@ -127,7 +127,7 @@ describe ErrorTracking::ListProjectsService do
end
it 'returns error' do
- expect(result).to include(status: :error, message: 'access denied')
+ expect(result).to include(status: :error, message: 'Access denied', http_status: :unauthorized)
end
end
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index 085b49f31ab..b1c64bc3c0a 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -345,7 +345,7 @@ describe Git::BranchHooksService do
end
end
- context 'when the project is forked' do
+ context 'when the project is forked', :sidekiq_might_not_need_inline do
let(:upstream_project) { project }
let(:forked_project) { fork_project(upstream_project, user, repository: true) }
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index bf68eb0af20..febd4992682 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -246,7 +246,7 @@ describe Git::BranchPushService, services: true do
allow(project.repository).to receive(:commits_between).and_return([commit])
end
- it "creates a note if a pushed commit mentions an issue" do
+ it "creates a note if a pushed commit mentions an issue", :sidekiq_might_not_need_inline do
expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, commit_author)
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
@@ -260,7 +260,7 @@ describe Git::BranchPushService, services: true do
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
- it "defaults to the pushing user if the commit's author is not known" do
+ it "defaults to the pushing user if the commit's author is not known", :sidekiq_might_not_need_inline do
allow(commit).to receive_messages(
author_name: 'unknown name',
author_email: 'unknown@email.com'
@@ -270,7 +270,7 @@ describe Git::BranchPushService, services: true do
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
- it "finds references in the first push to a non-default branch" do
+ it "finds references in the first push to a non-default branch", :sidekiq_might_not_need_inline do
allow(project.repository).to receive(:commits_between).with(blankrev, newrev).and_return([])
allow(project.repository).to receive(:commits_between).with("master", newrev).and_return([commit])
@@ -305,7 +305,7 @@ describe Git::BranchPushService, services: true do
end
context "while saving the 'first_mentioned_in_commit_at' metric for an issue" do
- it 'sets the metric for referenced issues' do
+ it 'sets the metric for referenced issues', :sidekiq_might_not_need_inline do
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
expect(issue.reload.metrics.first_mentioned_in_commit_at).to be_like_time(commit_time)
@@ -344,12 +344,12 @@ describe Git::BranchPushService, services: true do
end
context "to default branches" do
- it "closes issues" do
+ it "closes issues", :sidekiq_might_not_need_inline do
execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
expect(Issue.find(issue.id)).to be_closed
end
- it "adds a note indicating that the issue is now closed" do
+ it "adds a note indicating that the issue is now closed", :sidekiq_might_not_need_inline do
expect(SystemNoteService).to receive(:change_status).with(issue, project, commit_author, "closed", closing_commit)
execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
end
@@ -366,7 +366,7 @@ describe Git::BranchPushService, services: true do
allow(project).to receive(:default_branch).and_return('not-master')
end
- it "creates cross-reference notes" do
+ it "creates cross-reference notes", :sidekiq_might_not_need_inline do
expect(SystemNoteService).to receive(:cross_reference).with(issue, closing_commit, commit_author)
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
@@ -407,7 +407,7 @@ describe Git::BranchPushService, services: true do
context "mentioning an issue" do
let(:message) { "this is some work.\n\nrelated to JIRA-1" }
- it "initiates one api call to jira server to mention the issue" do
+ it "initiates one api call to jira server to mention the issue", :sidekiq_might_not_need_inline do
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
@@ -434,7 +434,7 @@ describe Git::BranchPushService, services: true do
allow_any_instance_of(JIRA::Resource::Issue).to receive(:key).and_return("JIRA-1")
end
- context "using right markdown" do
+ context "using right markdown", :sidekiq_might_not_need_inline do
it "initiates one api call to jira server to close the issue" do
execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
@@ -473,7 +473,7 @@ describe Git::BranchPushService, services: true do
end
end
- context 'when internal issues are enabled' do
+ context 'when internal issues are enabled', :sidekiq_might_not_need_inline do
let(:issue) { create(:issue, project: project) }
let(:message) { "this is some work.\n\ncloses JIRA-1 \n\n closes #{issue.to_reference}" }
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index d13739cefd9..055d0243d4b 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -26,7 +26,7 @@ describe Groups::DestroyService do
end
shared_examples 'group destruction' do |async|
- context 'database records' do
+ context 'database records', :sidekiq_might_not_need_inline do
before do
destroy_group(group, user, async)
end
@@ -37,7 +37,7 @@ describe Groups::DestroyService do
it { expect(NotificationSetting.unscoped.all).not_to include(notification_setting) }
end
- context 'mattermost team' do
+ context 'mattermost team', :sidekiq_might_not_need_inline do
let!(:chat_team) { create(:chat_team, namespace: group) }
it 'destroys the team too' do
@@ -47,7 +47,7 @@ describe Groups::DestroyService do
end
end
- context 'file system' do
+ context 'file system', :sidekiq_might_not_need_inline do
context 'Sidekiq inline' do
before do
# Run sidekiq immediately to check that renamed dir will be removed
@@ -55,8 +55,8 @@ describe Groups::DestroyService do
end
it 'verifies that paths have been deleted' do
- expect(gitlab_shell.exists?(project.repository_storage, group.path)).to be_falsey
- expect(gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, group.path)).to be_falsey
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_falsey
end
end
end
@@ -73,13 +73,13 @@ describe Groups::DestroyService do
after do
# Clean up stale directories
- gitlab_shell.rm_namespace(project.repository_storage, group.path)
- gitlab_shell.rm_namespace(project.repository_storage, remove_path)
+ TestEnv.rm_storage_dir(project.repository_storage, group.path)
+ TestEnv.rm_storage_dir(project.repository_storage, remove_path)
end
it 'verifies original paths and projects still exist' do
- expect(gitlab_shell.exists?(project.repository_storage, group.path)).to be_truthy
- expect(gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, group.path)).to be_truthy
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_falsey
expect(Project.unscoped.count).to eq(1)
expect(Group.unscoped.count).to eq(2)
end
diff --git a/spec/services/groups/group_links/create_service_spec.rb b/spec/services/groups/group_links/create_service_spec.rb
new file mode 100644
index 00000000000..36faa69577e
--- /dev/null
+++ b/spec/services/groups/group_links/create_service_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Groups::GroupLinks::CreateService, '#execute' do
+ let(:parent_group_user) { create(:user) }
+ let(:group_user) { create(:user) }
+ let(:child_group_user) { create(:user) }
+
+ let_it_be(:group_parent) { create(:group, :private) }
+ let_it_be(:group) { create(:group, :private, parent: group_parent) }
+ let_it_be(:group_child) { create(:group, :private, parent: group) }
+
+ let_it_be(:shared_group_parent) { create(:group, :private) }
+ let_it_be(:shared_group) { create(:group, :private, parent: shared_group_parent) }
+ let_it_be(:shared_group_child) { create(:group, :private, parent: shared_group) }
+
+ let_it_be(:project_parent) { create(:project, group: shared_group_parent) }
+ let_it_be(:project) { create(:project, group: shared_group) }
+ let_it_be(:project_child) { create(:project, group: shared_group_child) }
+
+ let(:opts) do
+ {
+ shared_group_access: Gitlab::Access::DEVELOPER,
+ expires_at: nil
+ }
+ end
+ let(:user) { group_user }
+
+ subject { described_class.new(group, user, opts) }
+
+ before do
+ group.add_guest(group_user)
+ shared_group.add_owner(group_user)
+ end
+
+ it 'adds group to another group' do
+ expect { subject.execute(shared_group) }.to change { group.shared_group_links.count }.from(0).to(1)
+ end
+
+ it 'returns false if shared group is blank' do
+ expect { subject.execute(nil) }.not_to change { group.shared_group_links.count }
+ end
+
+ context 'user does not have access to group' do
+ let(:user) { create(:user) }
+
+ before do
+ shared_group.add_owner(user)
+ end
+
+ it 'returns error' do
+ result = subject.execute(shared_group)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(404)
+ end
+ end
+
+ context 'user does not have admin access to shared group' do
+ let(:user) { create(:user) }
+
+ before do
+ group.add_guest(user)
+ shared_group.add_developer(user)
+ end
+
+ it 'returns error' do
+ result = subject.execute(shared_group)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(404)
+ end
+ end
+
+ context 'group hierarchies' do
+ before do
+ group_parent.add_owner(parent_group_user)
+ group.add_owner(group_user)
+ group_child.add_owner(child_group_user)
+ end
+
+ context 'group user' do
+ let(:user) { group_user }
+
+ it 'create proper authorizations' do
+ subject.execute(shared_group)
+
+ expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project)).to be_truthy
+ expect(Ability.allowed?(user, :read_project, project_child)).to be_truthy
+ end
+ end
+
+ context 'parent group user' do
+ let(:user) { parent_group_user }
+
+ it 'create proper authorizations' do
+ subject.execute(shared_group)
+
+ expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project_child)).to be_falsey
+ end
+ end
+
+ context 'child group user' do
+ let(:user) { child_group_user }
+
+ it 'create proper authorizations' do
+ subject.execute(shared_group)
+
+ expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project_child)).to be_falsey
+ end
+ end
+ end
+end
diff --git a/spec/services/groups/group_links/destroy_service_spec.rb b/spec/services/groups/group_links/destroy_service_spec.rb
new file mode 100644
index 00000000000..6f49b6eda94
--- /dev/null
+++ b/spec/services/groups/group_links/destroy_service_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Groups::GroupLinks::DestroyService, '#execute' do
+ let(:user) { create(:user) }
+
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:shared_group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, group: shared_group) }
+
+ subject { described_class.new(nil, nil) }
+
+ context 'single link' do
+ let!(:link) { create(:group_group_link, shared_group: shared_group, shared_with_group: group) }
+
+ it 'destroys link' do
+ expect { subject.execute(link) }.to change { GroupGroupLink.count }.from(1).to(0)
+ end
+
+ it 'revokes project authorization' do
+ group.add_developer(user)
+
+ expect { subject.execute(link) }.to(
+ change { Ability.allowed?(user, :read_project, project) }.from(true).to(false))
+ end
+ end
+
+ context 'multiple links' do
+ let_it_be(:another_group) { create(:group, :private) }
+ let_it_be(:another_shared_group) { create(:group, :private) }
+
+ let!(:links) do
+ [
+ create(:group_group_link, shared_group: shared_group, shared_with_group: group),
+ create(:group_group_link, shared_group: shared_group, shared_with_group: another_group),
+ create(:group_group_link, shared_group: another_shared_group, shared_with_group: group),
+ create(:group_group_link, shared_group: another_shared_group, shared_with_group: another_group)
+ ]
+ end
+
+ it 'updates project authorization once per group' do
+ expect(GroupGroupLink).to receive(:delete)
+ expect(group).to receive(:refresh_members_authorized_projects).once
+ expect(another_group).to receive(:refresh_members_authorized_projects).once
+
+ subject.execute(links)
+ end
+
+ it 'rolls back changes when error happens' do
+ group.add_developer(user)
+
+ expect(group).to receive(:refresh_members_authorized_projects).once.and_call_original
+ expect(another_group).to(
+ receive(:refresh_members_authorized_projects).and_raise('boom'))
+
+ expect { subject.execute(links) }.to raise_error('boom')
+
+ expect(GroupGroupLink.count).to eq(links.length)
+ expect(Ability.allowed?(user, :read_project, project)).to be_truthy
+ end
+ end
+end
diff --git a/spec/services/groups/import_export/export_service_spec.rb b/spec/services/groups/import_export/export_service_spec.rb
new file mode 100644
index 00000000000..2024e1ed457
--- /dev/null
+++ b/spec/services/groups/import_export/export_service_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Groups::ImportExport::ExportService do
+ describe '#execute' do
+ let!(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:export_path) { shared.export_path }
+ let(:service) { described_class.new(group: group, user: user, params: { shared: shared }) }
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ it 'saves the models' do
+ expect(Gitlab::ImportExport::GroupTreeSaver).to receive(:new).and_call_original
+
+ service.execute
+ end
+
+ context 'when saver succeeds' do
+ it 'saves the group in the file system' do
+ service.execute
+
+ expect(group.import_export_upload.export_file.file).not_to be_nil
+ expect(File.directory?(export_path)).to eq(false)
+ expect(File.exist?(shared.archive_path)).to eq(false)
+ end
+ end
+
+ context 'when saving services fail' do
+ before do
+ allow(service).to receive_message_chain(:tree_exporter, :save).and_return(false)
+ end
+
+ it 'removes the remaining exported data' do
+ allow_any_instance_of(Gitlab::ImportExport::Saver).to receive(:compress_and_save).and_return(false)
+
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+
+ expect(group.import_export_upload).to be_nil
+ expect(File.directory?(export_path)).to eq(false)
+ expect(File.exist?(shared.archive_path)).to eq(false)
+ end
+
+ it 'notifies logger' do
+ expect_any_instance_of(Gitlab::Import::Logger).to receive(:error)
+
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
+ end
+end
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 5ef1fb1932f..9a490dfd779 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -427,20 +427,34 @@ describe Groups::TransferService do
end
end
- context 'when a project in group has container images' do
+ context 'when a project has container images' do
let(:group) { create(:group, :public, :nested) }
- let!(:project) { create(:project, :repository, :public, namespace: group) }
+ let!(:container_repository) { create(:container_repository, project: project) }
+
+ subject { transfer_service.execute(new_parent_group) }
before do
- stub_container_registry_tags(repository: /image/, tags: %w[rc1])
- create(:container_repository, project: project, name: :image)
- create(:group_member, :owner, group: new_parent_group, user: user)
+ group.add_owner(user)
+ new_parent_group.add_owner(user)
end
- it 'does not allow group to be transferred' do
- transfer_service.execute(new_parent_group)
+ context 'within group' do
+ let(:project) { create(:project, :repository, :public, namespace: group) }
+
+ it 'does not transfer' do
+ expect(subject).to be false
+ expect(transfer_service.error).to match(/Docker images in their Container Registry/)
+ end
+ end
- expect(transfer_service.error).to match(/Docker images in their Container Registry/)
+ context 'within subgroup' do
+ let(:subgroup) { create(:group, parent: group) }
+ let(:project) { create(:project, :repository, :public, namespace: subgroup) }
+
+ it 'does not transfer' do
+ expect(subject).to be false
+ expect(transfer_service.error).to match(/Docker images in their Container Registry/)
+ end
end
end
end
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index ca8eaf4c970..1aa7e06182b 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -32,6 +32,43 @@ describe Groups::UpdateService do
expect(service.execute).to be_falsey
end
+
+ context 'when a project has container images' do
+ let(:params) { { path: SecureRandom.hex } }
+ let!(:container_repository) { create(:container_repository, project: project) }
+
+ subject { described_class.new(public_group, user, params).execute }
+
+ context 'within group' do
+ let(:project) { create(:project, group: public_group) }
+
+ context 'with path updates' do
+ it 'does not allow the update' do
+ expect(subject).to be false
+ expect(public_group.errors[:base].first).to match(/Docker images in their Container Registry/)
+ end
+ end
+
+ context 'with name updates' do
+ let(:params) { { name: 'new-name' } }
+
+ it 'allows the update' do
+ expect(subject).to be true
+ expect(public_group.reload.name).to eq('new-name')
+ end
+ end
+ end
+
+ context 'within subgroup' do
+ let(:subgroup) { create(:group, parent: public_group) }
+ let(:project) { create(:project, group: subgroup) }
+
+ it 'does not allow path updates' do
+ expect(subject).to be false
+ expect(public_group.errors[:base].first).to match(/Docker images in their Container Registry/)
+ end
+ end
+ end
end
context "internal group with internal project" do
@@ -148,30 +185,6 @@ describe Groups::UpdateService do
end
end
- context 'projects in group have container images' do
- let(:service) { described_class.new(public_group, user, path: SecureRandom.hex) }
- let(:project) { create(:project, :internal, group: public_group) }
-
- before do
- stub_container_registry_tags(repository: /image/, tags: %w[rc1])
- create(:container_repository, project: project, name: :image)
- end
-
- it 'does not allow path to be changed' do
- result = described_class.new(public_group, user, path: 'new-path').execute
-
- expect(result).to eq false
- expect(public_group.errors[:base].first).to match(/Docker images in their Container Registry/)
- end
-
- it 'allows other settings to be changed' do
- result = described_class.new(public_group, user, name: 'new-name').execute
-
- expect(result).to eq true
- expect(public_group.reload.name).to eq('new-name')
- end
- end
-
context 'for a subgroup' do
let(:subgroup) { create(:group, :private, parent: private_group) }
diff --git a/spec/services/import_export_clean_up_service_spec.rb b/spec/services/import_export_clean_up_service_spec.rb
index 51720e786dc..9f811f56f50 100644
--- a/spec/services/import_export_clean_up_service_spec.rb
+++ b/spec/services/import_export_clean_up_service_spec.rb
@@ -6,7 +6,7 @@ describe ImportExportCleanUpService do
describe '#execute' do
let(:service) { described_class.new }
- let(:tmp_import_export_folder) { 'tmp/project_exports' }
+ let(:tmp_import_export_folder) { 'tmp/gitlab_exports' }
context 'when the import/export directory does not exist' do
it 'does not remove any archives' do
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 1f7d564b6ec..dce62d1d20e 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -70,7 +70,7 @@ describe Issues::CloseService do
end
describe '#close_issue' do
- context "closed by a merge request" do
+ context "closed by a merge request", :sidekiq_might_not_need_inline do
it 'mentions closure via a merge request' do
perform_enqueued_jobs do
described_class.new(project, user).close_issue(issue, closed_via: closing_merge_request)
@@ -100,7 +100,7 @@ describe Issues::CloseService do
end
end
- context "closed by a commit" do
+ context "closed by a commit", :sidekiq_might_not_need_inline do
it 'mentions closure via a commit' do
perform_enqueued_jobs do
described_class.new(project, user).close_issue(issue, closed_via: closing_commit)
@@ -146,7 +146,7 @@ describe Issues::CloseService do
expect(issue.closed_by_id).to be(user.id)
end
- it 'sends email to user2 about assign of new issue' do
+ it 'sends email to user2 about assign of new issue', :sidekiq_might_not_need_inline do
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
expect(email.subject).to include(issue.title)
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 154bfec0da2..604befd7225 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -169,7 +169,7 @@ describe Issues::UpdateService, :mailer do
end
end
- context 'with background jobs processed' do
+ context 'with background jobs processed', :sidekiq_might_not_need_inline do
before do
perform_enqueued_jobs do
update_issue(opts)
@@ -187,7 +187,6 @@ describe Issues::UpdateService, :mailer do
it 'creates system note about issue reassign' do
note = find_note('assigned to')
- expect(note).not_to be_nil
expect(note.note).to include "assigned to #{user2.to_reference}"
end
@@ -202,14 +201,12 @@ describe Issues::UpdateService, :mailer do
it 'creates system note about title change' do
note = find_note('changed title')
- expect(note).not_to be_nil
expect(note.note).to eq 'changed title from **{-Old-} title** to **{+New+} title**'
end
it 'creates system note about discussion lock' do
note = find_note('locked this issue')
- expect(note).not_to be_nil
expect(note.note).to eq 'locked this issue'
end
end
@@ -221,20 +218,10 @@ describe Issues::UpdateService, :mailer do
note = find_note('changed the description')
- expect(note).not_to be_nil
expect(note.note).to eq('changed the description')
end
end
- it 'creates zoom_link_added system note when a zoom link is added to the description' do
- update_issue(description: 'Changed description https://zoom.us/j/5873603787')
-
- note = find_note('added a Zoom call')
-
- expect(note).not_to be_nil
- expect(note.note).to eq('added a Zoom call to this issue')
- end
-
context 'when issue turns confidential' do
let(:opts) do
{
@@ -252,7 +239,6 @@ describe Issues::UpdateService, :mailer do
note = find_note('made the issue confidential')
- expect(note).not_to be_nil
expect(note.note).to eq 'made the issue confidential'
end
@@ -366,7 +352,7 @@ describe Issues::UpdateService, :mailer do
it_behaves_like 'system notes for milestones'
- it 'sends notifications for subscribers of changed milestone' do
+ it 'sends notifications for subscribers of changed milestone', :sidekiq_might_not_need_inline do
issue.milestone = create(:milestone, project: project)
issue.save
@@ -398,7 +384,7 @@ describe Issues::UpdateService, :mailer do
it_behaves_like 'system notes for milestones'
- it 'sends notifications for subscribers of changed milestone' do
+ it 'sends notifications for subscribers of changed milestone', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
update_issue(milestone: create(:milestone, project: project))
end
@@ -435,7 +421,7 @@ describe Issues::UpdateService, :mailer do
end
end
- it 'sends notifications for subscribers of newly added labels' do
+ it 'sends notifications for subscribers of newly added labels', :sidekiq_might_not_need_inline do
opts = { label_ids: [label.id] }
perform_enqueued_jobs do
@@ -620,6 +606,24 @@ describe Issues::UpdateService, :mailer do
end
end
+ context 'when same id is passed as add_label_ids and remove_label_ids' do
+ let(:params) { { add_label_ids: [label.id], remove_label_ids: [label.id] } }
+
+ context 'for a label assigned to an issue' do
+ it 'removes the label' do
+ issue.update(labels: [label])
+
+ expect(result.label_ids).to be_empty
+ end
+ end
+
+ context 'for a label not assigned to an issue' do
+ it 'does not add the label' do
+ expect(result.label_ids).to be_empty
+ end
+ end
+ end
+
context 'when duplicate label titles are given' do
let(:params) do
{ labels: [label3.title, label3.title] }
diff --git a/spec/services/issues/zoom_link_service_spec.rb b/spec/services/issues/zoom_link_service_spec.rb
index ba3f007c917..ecca9467965 100644
--- a/spec/services/issues/zoom_link_service_spec.rb
+++ b/spec/services/issues/zoom_link_service_spec.rb
@@ -14,27 +14,16 @@ describe Issues::ZoomLinkService do
project.add_reporter(user)
end
- shared_context 'with Zoom link' do
+ shared_context '"added" Zoom meeting' do
before do
- issue.update!(description: "Description\n\n#{zoom_link}")
+ create(:zoom_meeting, issue: issue)
end
end
- shared_context 'with Zoom link not at the end' do
+ shared_context '"removed" zoom meetings' do
before do
- issue.update!(description: "Description with #{zoom_link} some where")
- end
- end
-
- shared_context 'without Zoom link' do
- before do
- issue.update!(description: "Description\n\nhttp://example.com")
- end
- end
-
- shared_context 'without issue description' do
- before do
- issue.update!(description: nil)
+ create(:zoom_meeting, issue: issue, issue_status: :removed)
+ create(:zoom_meeting, issue: issue, issue_status: :removed)
end
end
@@ -45,11 +34,10 @@ describe Issues::ZoomLinkService do
end
describe '#add_link' do
- shared_examples 'can add link' do
- it 'appends the link to issue description' do
+ shared_examples 'can add meeting' do
+ it 'appends the new meeting to zoom_meetings' do
expect(result).to be_success
- expect(result.payload[:description])
- .to eq("#{issue.description}\n\n#{zoom_link}")
+ expect(ZoomMeeting.canonical_meeting_url(issue)).to eq(zoom_link)
end
it 'tracks the add event' do
@@ -57,55 +45,63 @@ describe Issues::ZoomLinkService do
.with('IncidentManagement::ZoomIntegration', 'add_zoom_meeting', label: 'Issue ID', value: issue.id)
result
end
+
+ it 'creates a zoom_link_added notification' do
+ expect(SystemNoteService).to receive(:zoom_link_added).with(issue, project, user)
+ expect(SystemNoteService).not_to receive(:zoom_link_removed)
+ result
+ end
end
- shared_examples 'cannot add link' do
- it 'cannot add the link' do
+ shared_examples 'cannot add meeting' do
+ it 'cannot add the meeting' do
expect(result).to be_error
expect(result.message).to eq('Failed to add a Zoom meeting')
end
+
+ it 'creates no notification' do
+ expect(SystemNoteService).not_to receive(:zoom_link_added)
+ expect(SystemNoteService).not_to receive(:zoom_link_removed)
+ result
+ end
end
subject(:result) { service.add_link(zoom_link) }
- context 'without Zoom link in the issue description' do
- include_context 'without Zoom link'
- include_examples 'can add link'
+ context 'without existing Zoom meeting' do
+ include_examples 'can add meeting'
- context 'with invalid Zoom link' do
+ context 'with invalid Zoom url' do
let(:zoom_link) { 'https://not-zoom.link' }
- include_examples 'cannot add link'
+ include_examples 'cannot add meeting'
end
context 'with insufficient permissions' do
include_context 'insufficient permissions'
- include_examples 'cannot add link'
+ include_examples 'cannot add meeting'
end
end
- context 'with Zoom link in the issue description' do
- include_context 'with Zoom link'
- include_examples 'cannot add link'
+ context 'with "added" Zoom meeting' do
+ include_context '"added" Zoom meeting'
+ include_examples 'cannot add meeting'
+ end
- context 'but not at the end' do
- include_context 'with Zoom link not at the end'
- include_examples 'can add link'
+ context 'with "added" Zoom meeting and race condition' do
+ include_context '"added" Zoom meeting'
+ before do
+ allow(service).to receive(:can_add_link?).and_return(true)
end
- end
- context 'without issue description' do
- include_context 'without issue description'
- include_examples 'can add link'
+ include_examples 'cannot add meeting'
end
end
describe '#can_add_link?' do
subject { service.can_add_link? }
- context 'without Zoom link in the issue description' do
- include_context 'without Zoom link'
-
+ context 'without "added" zoom meeting' do
it { is_expected.to eq(true) }
context 'with insufficient permissions' do
@@ -115,81 +111,93 @@ describe Issues::ZoomLinkService do
end
end
- context 'with Zoom link in the issue description' do
- include_context 'with Zoom link'
+ context 'with Zoom meeting in the issue description' do
+ include_context '"added" Zoom meeting'
it { is_expected.to eq(false) }
end
end
describe '#remove_link' do
- shared_examples 'cannot remove link' do
- it 'cannot remove the link' do
+ shared_examples 'cannot remove meeting' do
+ it 'cannot remove the meeting' do
expect(result).to be_error
expect(result.message).to eq('Failed to remove a Zoom meeting')
end
- end
- subject(:result) { service.remove_link }
+ it 'creates no notification' do
+ expect(SystemNoteService).not_to receive(:zoom_link_added)
+ expect(SystemNoteService).not_to receive(:zoom_link_removed)
+ result
+ end
+ end
- context 'with Zoom link in the issue description' do
- include_context 'with Zoom link'
+ shared_examples 'can remove meeting' do
+ it 'creates no notification' do
+ expect(SystemNoteService).not_to receive(:zoom_link_added).with(issue, project, user)
+ expect(SystemNoteService).to receive(:zoom_link_removed)
+ result
+ end
- it 'removes the link from the issue description' do
+ it 'can remove the meeting' do
expect(result).to be_success
- expect(result.payload[:description])
- .to eq(issue.description.delete_suffix("\n\n#{zoom_link}"))
+ expect(ZoomMeeting.canonical_meeting_url(issue)).to eq(nil)
end
it 'tracks the remove event' do
expect(Gitlab::Tracking).to receive(:event)
- .with('IncidentManagement::ZoomIntegration', 'remove_zoom_meeting', label: 'Issue ID', value: issue.id)
-
+ .with('IncidentManagement::ZoomIntegration', 'remove_zoom_meeting', label: 'Issue ID', value: issue.id)
result
end
+ end
- context 'with insufficient permissions' do
- include_context 'insufficient permissions'
- include_examples 'cannot remove link'
- end
+ subject(:result) { service.remove_link }
- context 'but not at the end' do
- include_context 'with Zoom link not at the end'
- include_examples 'cannot remove link'
+ context 'with Zoom meeting' do
+ include_context '"added" Zoom meeting'
+
+ context 'removes the link' do
+ include_examples 'can remove meeting'
end
- end
- context 'without Zoom link in the issue description' do
- include_context 'without Zoom link'
- include_examples 'cannot remove link'
+ context 'with insufficient permissions' do
+ include_context 'insufficient permissions'
+ include_examples 'cannot remove meeting'
+ end
end
- context 'without issue description' do
- include_context 'without issue description'
- include_examples 'cannot remove link'
+ context 'without "added" Zoom meeting' do
+ include_context '"removed" zoom meetings'
+ include_examples 'cannot remove meeting'
end
end
describe '#can_remove_link?' do
subject { service.can_remove_link? }
- context 'with Zoom link in the issue description' do
- include_context 'with Zoom link'
+ context 'without Zoom meeting' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'with only "removed" zoom meetings' do
+ include_context '"removed" zoom meetings'
+ it { is_expected.to eq(false) }
+ end
+ context 'with "added" Zoom meeting' do
+ include_context '"added" Zoom meeting'
it { is_expected.to eq(true) }
+ context 'with "removed" zoom meetings' do
+ include_context '"removed" zoom meetings'
+ it { is_expected.to eq(true) }
+ end
+
context 'with insufficient permissions' do
include_context 'insufficient permissions'
-
it { is_expected.to eq(false) }
end
end
-
- context 'without Zoom link in the issue description' do
- include_context 'without Zoom link'
-
- it { is_expected.to eq(false) }
- end
end
describe '#parse_link' do
diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb
index effcaf53535..73ac0bd7716 100644
--- a/spec/services/members/destroy_service_spec.rb
+++ b/spec/services/members/destroy_service_spec.rb
@@ -45,7 +45,7 @@ describe Members::DestroyService do
shared_examples 'a service destroying a member with access' do
it_behaves_like 'a service destroying a member'
- it 'invalidates cached counts for assigned issues and merge requests', :aggregate_failures do
+ it 'invalidates cached counts for assigned issues and merge requests', :aggregate_failures, :sidekiq_might_not_need_inline do
create(:issue, project: group_project, assignees: [member_user])
create(:merge_request, source_project: group_project, assignees: [member_user])
create(:todo, :pending, project: group_project, user: member_user)
diff --git a/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb b/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
index f26b67f902d..203048984a1 100644
--- a/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
+++ b/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
@@ -10,9 +10,7 @@ describe MergeRequests::AddTodoWhenBuildFailsService do
let(:ref) { merge_request.source_branch }
let(:pipeline) do
- create(:ci_pipeline_with_one_job, ref: ref,
- project: project,
- sha: sha)
+ create(:ci_pipeline, ref: ref, project: project, sha: sha)
end
let(:service) do
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index 68e53553043..9b358839c06 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -80,7 +80,7 @@ describe MergeRequests::BuildService do
end
it 'does not assign force_remove_source_branch' do
- expect(merge_request.force_remove_source_branch?).to be_falsey
+ expect(merge_request.force_remove_source_branch?).to be_truthy
end
context 'with force_remove_source_branch parameter when the user is authorized' do
@@ -91,6 +91,36 @@ describe MergeRequests::BuildService do
it 'assigns force_remove_source_branch' do
expect(merge_request.force_remove_source_branch?).to be_truthy
end
+
+ context 'with project setting remove_source_branch_after_merge false' do
+ before do
+ project.remove_source_branch_after_merge = false
+ end
+
+ it 'assigns force_remove_source_branch' do
+ expect(merge_request.force_remove_source_branch?).to be_truthy
+ end
+ end
+ end
+
+ context 'with project setting remove_source_branch_after_merge true' do
+ before do
+ project.remove_source_branch_after_merge = true
+ end
+
+ it 'assigns force_remove_source_branch' do
+ expect(merge_request.force_remove_source_branch?).to be_truthy
+ end
+
+ context 'with force_remove_source_branch parameter false' do
+ before do
+ params[:force_remove_source_branch] = '0'
+ end
+
+ it 'does not assign force_remove_source_branch' do
+ expect(merge_request.force_remove_source_branch?).to be(false)
+ end
+ end
end
context 'missing source branch' do
@@ -131,7 +161,7 @@ describe MergeRequests::BuildService do
let!(:project) { fork_project(target_project, user, namespace: user.namespace, repository: true) }
let(:source_project) { project }
- it 'creates compare object with target branch as default branch' do
+ it 'creates compare object with target branch as default branch', :sidekiq_might_not_need_inline do
expect(merge_request.compare).to be_present
expect(merge_request.target_branch).to eq(project.default_branch)
end
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index 29b7e0f17e2..b037b73752e 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -38,7 +38,7 @@ describe MergeRequests::CloseService do
.with(@merge_request, 'close')
end
- it 'sends email to user2 about assign of new merge_request' do
+ it 'sends email to user2 about assign of new merge_request', :sidekiq_might_not_need_inline do
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
expect(email.subject).to include(merge_request.title)
diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb
index 51a5c51f6c3..7145cfe7897 100644
--- a/spec/services/merge_requests/create_from_issue_service_spec.rb
+++ b/spec/services/merge_requests/create_from_issue_service_spec.rb
@@ -36,25 +36,25 @@ describe MergeRequests::CreateFromIssueService do
expect(result[:message]).to eq('Invalid issue iid')
end
- it 'creates a branch based on issue title' do
+ it 'creates a branch based on issue title', :sidekiq_might_not_need_inline do
service.execute
expect(target_project.repository.branch_exists?(issue.to_branch_name)).to be_truthy
end
- it 'creates a branch using passed name' do
+ it 'creates a branch using passed name', :sidekiq_might_not_need_inline do
service_with_custom_source_branch.execute
expect(target_project.repository.branch_exists?(custom_source_branch)).to be_truthy
end
- it 'creates the new_merge_request system note' do
+ it 'creates the new_merge_request system note', :sidekiq_might_not_need_inline do
expect(SystemNoteService).to receive(:new_merge_request).with(issue, project, user, instance_of(MergeRequest))
service.execute
end
- it 'creates the new_issue_branch system note when the branch could be created but the merge_request cannot be created' do
+ it 'creates the new_issue_branch system note when the branch could be created but the merge_request cannot be created', :sidekiq_might_not_need_inline do
expect_any_instance_of(MergeRequest).to receive(:valid?).at_least(:once).and_return(false)
expect(SystemNoteService).to receive(:new_issue_branch).with(issue, project, user, issue.to_branch_name, branch_project: target_project)
@@ -62,35 +62,35 @@ describe MergeRequests::CreateFromIssueService do
service.execute
end
- it 'creates a merge request' do
+ it 'creates a merge request', :sidekiq_might_not_need_inline do
expect { service.execute }.to change(target_project.merge_requests, :count).by(1)
end
- it 'sets the merge request author to current user' do
+ it 'sets the merge request author to current user', :sidekiq_might_not_need_inline do
result = service.execute
expect(result[:merge_request].author).to eq(user)
end
- it 'sets the merge request source branch to the new issue branch' do
+ it 'sets the merge request source branch to the new issue branch', :sidekiq_might_not_need_inline do
result = service.execute
expect(result[:merge_request].source_branch).to eq(issue.to_branch_name)
end
- it 'sets the merge request source branch to the passed branch name' do
+ it 'sets the merge request source branch to the passed branch name', :sidekiq_might_not_need_inline do
result = service_with_custom_source_branch.execute
expect(result[:merge_request].source_branch).to eq(custom_source_branch)
end
- it 'sets the merge request target branch to the project default branch' do
+ it 'sets the merge request target branch to the project default branch', :sidekiq_might_not_need_inline do
result = service.execute
expect(result[:merge_request].target_branch).to eq(target_project.default_branch)
end
- it 'executes quick actions if the build service sets them in the description' do
+ it 'executes quick actions if the build service sets them in the description', :sidekiq_might_not_need_inline do
allow(service).to receive(:merge_request).and_wrap_original do |m, *args|
m.call(*args).tap do |merge_request|
merge_request.description = "/assign #{user.to_reference}"
@@ -102,7 +102,7 @@ describe MergeRequests::CreateFromIssueService do
expect(result[:merge_request].assignees).to eq([user])
end
- context 'when ref branch is set' do
+ context 'when ref branch is set', :sidekiq_might_not_need_inline do
subject { described_class.new(project, user, ref: 'feature', **service_params).execute }
it 'sets the merge request source branch to the new issue branch' do
@@ -193,7 +193,7 @@ describe MergeRequests::CreateFromIssueService do
it_behaves_like 'a service that creates a merge request from an issue'
- it 'sets the merge request title to: "WIP: $issue-branch-name' do
+ it 'sets the merge request title to: "WIP: $issue-branch-name"', :sidekiq_might_not_need_inline do
result = service.execute
expect(result[:merge_request].title).to eq("WIP: #{issue.to_branch_name.titleize.humanize}")
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 699f2a98088..3db1471bf3c 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -57,7 +57,7 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
expect(Todo.where(attributes).count).to be_zero
end
- it 'creates exactly 1 create MR event' do
+ it 'creates exactly 1 create MR event', :sidekiq_might_not_need_inline do
attributes = {
action: Event::CREATED,
target_id: merge_request.id,
@@ -216,7 +216,7 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
target_project.add_maintainer(user)
end
- it 'create legacy detached merge request pipeline for fork merge request' do
+ it 'create legacy detached merge request pipeline for fork merge request', :sidekiq_might_not_need_inline do
expect(merge_request.actual_head_pipeline)
.to be_legacy_detached_merge_request_pipeline
end
@@ -477,7 +477,7 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
project.add_developer(user)
end
- it 'creates the merge request' do
+ it 'creates the merge request', :sidekiq_might_not_need_inline do
merge_request = described_class.new(project, user, opts).execute
expect(merge_request).to be_persisted
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
index 3b1096c51cb..87fcd70a298 100644
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ b/spec/services/merge_requests/ff_merge_service_spec.rb
@@ -13,6 +13,7 @@ describe MergeRequests::FfMergeService do
author: create(:user))
end
let(:project) { merge_request.project }
+ let(:valid_merge_params) { { sha: merge_request.diff_head_sha } }
before do
project.add_maintainer(user)
@@ -21,39 +22,69 @@ describe MergeRequests::FfMergeService do
describe '#execute' do
context 'valid params' do
- let(:service) { described_class.new(project, user, {}) }
-
- before do
- allow(service).to receive(:execute_hooks)
+ let(:service) { described_class.new(project, user, valid_merge_params) }
+ def execute_ff_merge
perform_enqueued_jobs do
service.execute(merge_request)
end
end
+ before do
+ allow(service).to receive(:execute_hooks)
+ end
+
it "does not create merge commit" do
+ execute_ff_merge
+
source_branch_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
target_branch_sha = merge_request.target_project.repository.commit(merge_request.target_branch).sha
+
expect(source_branch_sha).to eq(target_branch_sha)
end
- it { expect(merge_request).to be_valid }
- it { expect(merge_request).to be_merged }
+ it 'keeps the merge request valid' do
+ expect { execute_ff_merge }
+ .not_to change { merge_request.valid? }
+ end
+
+ it 'updates the merge request to merged' do
+ expect { execute_ff_merge }
+ .to change { merge_request.merged? }
+ .from(false)
+ .to(true)
+ end
it 'sends email to user2 about merge of new merge_request' do
+ execute_ff_merge
+
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
expect(email.subject).to include(merge_request.title)
end
it 'creates system note about merge_request merge' do
+ execute_ff_merge
+
note = merge_request.notes.last
expect(note.note).to include 'merged'
end
+
+ it 'does not update squash_commit_sha if it is not a squash' do
+ expect { execute_ff_merge }.not_to change { merge_request.squash_commit_sha }
+ end
+
+ it 'updates squash_commit_sha if it is a squash' do
+ merge_request.update!(squash: true)
+
+ expect { execute_ff_merge }
+ .to change { merge_request.squash_commit_sha }
+ .from(nil)
+ end
end
- context "error handling" do
- let(:service) { described_class.new(project, user, commit_message: 'Awesome message') }
+ context 'error handling' do
+ let(:service) { described_class.new(project, user, valid_merge_params.merge(commit_message: 'Awesome message')) }
before do
allow(Rails.logger).to receive(:error)
@@ -82,6 +113,16 @@ describe MergeRequests::FfMergeService do
expect(merge_request.merge_error).to include(error_message)
expect(Rails.logger).to have_received(:error).with(a_string_matching(error_message))
end
+
+ it 'does not update squash_commit_sha if squash merge is not successful' do
+ merge_request.update!(squash: true)
+
+ expect(project.repository.raw).to receive(:ff_merge) do
+ raise 'Merge error'
+ end
+
+ expect { service.execute(merge_request) }.not_to change { merge_request.squash_commit_sha }
+ end
end
end
end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 22578436c18..c938dd1cb0b 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -14,9 +14,12 @@ describe MergeRequests::MergeService do
end
describe '#execute' do
- context 'valid params' do
- let(:service) { described_class.new(project, user, commit_message: 'Awesome message') }
+ let(:service) { described_class.new(project, user, merge_params) }
+ let(:merge_params) do
+ { commit_message: 'Awesome message', sha: merge_request.diff_head_sha }
+ end
+ context 'valid params' do
before do
allow(service).to receive(:execute_hooks)
@@ -38,11 +41,80 @@ describe MergeRequests::MergeService do
note = merge_request.notes.last
expect(note.note).to include 'merged'
end
+
+ context 'when squashing' do
+ let(:merge_params) do
+ { commit_message: 'Merge commit message',
+ squash_commit_message: 'Squash commit message',
+ sha: merge_request.diff_head_sha }
+ end
+
+ let(:merge_request) do
+ # A merge request with 5 commits
+ create(:merge_request, :simple,
+ author: user2,
+ assignees: [user2],
+ squash: true,
+ source_branch: 'improve/awesome',
+ target_branch: 'fix')
+ end
+
+ it 'merges the merge request with squashed commits' do
+ expect(merge_request).to be_merged
+
+ merge_commit = merge_request.merge_commit
+ squash_commit = merge_request.merge_commit.parents.last
+
+ expect(merge_commit.message).to eq('Merge commit message')
+ expect(squash_commit.message).to eq("Squash commit message\n")
+ end
+ end
end
- context 'closes related issues' do
- let(:service) { described_class.new(project, user, commit_message: 'Awesome message') }
+ context 'when an invalid sha is passed' do
+ let(:merge_request) do
+ create(:merge_request, :simple,
+ author: user2,
+ assignees: [user2],
+ squash: true,
+ source_branch: 'improve/awesome',
+ target_branch: 'fix')
+ end
+
+ let(:merge_params) do
+ { sha: merge_request.commits.second.sha }
+ end
+
+ it 'does not merge the MR' do
+ service.execute(merge_request)
+
+ expect(merge_request).not_to be_merged
+ expect(merge_request.merge_error).to match(/Branch has been updated/)
+ end
+ end
+
+ context 'when the `sha` param is missing' do
+ let(:merge_params) { {} }
+
+ it 'returns the error' do
+ merge_error = 'Branch has been updated since the merge was requested. '\
+ 'Please review the changes.'
+
+ expect { service.execute(merge_request) }
+ .to change { merge_request.merge_error }
+ .from(nil).to(merge_error)
+ end
+
+ it 'merges the MR when the feature is disabled' do
+ stub_feature_flags(validate_merge_sha: false)
+ service.execute(merge_request)
+
+ expect(merge_request).to be_merged
+ end
+ end
+
+ context 'closes related issues' do
before do
allow(project).to receive(:default_branch).and_return(merge_request.target_branch)
end
@@ -83,12 +155,12 @@ describe MergeRequests::MergeService do
service.execute(merge_request)
end
- context "when jira_issue_transition_id is not present" do
+ context 'when jira_issue_transition_id is not present' do
before do
allow_any_instance_of(JIRA::Resource::Issue).to receive(:resolution).and_return(nil)
end
- it "does not close issue" do
+ it 'does not close issue' do
allow(jira_tracker).to receive_messages(jira_issue_transition_id: nil)
expect_any_instance_of(JiraService).not_to receive(:transition_issue)
@@ -97,7 +169,7 @@ describe MergeRequests::MergeService do
end
end
- context "wrong issue markdown" do
+ context 'wrong issue markdown' do
it 'does not close issues on Jira issue tracker' do
jira_issue = ExternalIssue.new('#JIRA-123', project)
stub_jira_urls(jira_issue)
@@ -115,7 +187,7 @@ describe MergeRequests::MergeService do
context 'closes related todos' do
let(:merge_request) { create(:merge_request, assignees: [user], author: user) }
let(:project) { merge_request.project }
- let(:service) { described_class.new(project, user, commit_message: 'Awesome message') }
+
let!(:todo) do
create(:todo, :assigned,
project: project,
@@ -139,7 +211,7 @@ describe MergeRequests::MergeService do
context 'source branch removal' do
context 'when the source branch is protected' do
let(:service) do
- described_class.new(project, user, 'should_remove_source_branch' => true)
+ described_class.new(project, user, merge_params.merge('should_remove_source_branch' => true))
end
before do
@@ -154,7 +226,7 @@ describe MergeRequests::MergeService do
context 'when the source branch is the default branch' do
let(:service) do
- described_class.new(project, user, 'should_remove_source_branch' => true)
+ described_class.new(project, user, merge_params.merge('should_remove_source_branch' => true))
end
before do
@@ -169,8 +241,6 @@ describe MergeRequests::MergeService do
context 'when the source branch can be removed' do
context 'when MR author set the source branch to be removed' do
- let(:service) { described_class.new(project, user, commit_message: 'Awesome message') }
-
before do
merge_request.update_attribute(:merge_params, { 'force_remove_source_branch' => '1' })
end
@@ -183,7 +253,7 @@ describe MergeRequests::MergeService do
end
context 'when the merger set the source branch not to be removed' do
- let(:service) { described_class.new(project, user, commit_message: 'Awesome message', 'should_remove_source_branch' => false) }
+ let(:service) { described_class.new(project, user, merge_params.merge('should_remove_source_branch' => false)) }
it 'does not delete the source branch' do
expect(DeleteBranchService).not_to receive(:new)
@@ -194,7 +264,7 @@ describe MergeRequests::MergeService do
context 'when MR merger set the source branch to be removed' do
let(:service) do
- described_class.new(project, user, commit_message: 'Awesome message', 'should_remove_source_branch' => true)
+ described_class.new(project, user, merge_params.merge('should_remove_source_branch' => true))
end
it 'removes the source branch using the current user' do
@@ -207,9 +277,7 @@ describe MergeRequests::MergeService do
end
end
- context "error handling" do
- let(:service) { described_class.new(project, user, commit_message: 'Awesome message') }
-
+ context 'error handling' do
before do
allow(Rails.logger).to receive(:error)
end
@@ -230,7 +298,7 @@ describe MergeRequests::MergeService do
it 'logs and saves error if there is an exception' do
error_message = 'error message'
- allow(service).to receive(:repository).and_raise("error message")
+ allow(service).to receive(:repository).and_raise('error message')
allow(service).to receive(:execute_hooks)
service.execute(merge_request)
@@ -310,7 +378,7 @@ describe MergeRequests::MergeService do
expect(Rails.logger).to have_received(:error).with(a_string_matching(error_message))
end
- context "when fast-forward merge is not allowed" do
+ context 'when fast-forward merge is not allowed' do
before do
allow_any_instance_of(Repository).to receive(:ancestor?).and_return(nil)
end
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 758679edc45..cccafddc450 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -76,7 +76,7 @@ describe MergeRequests::MergeToRefService do
described_class.new(project, user, **params)
end
- let(:params) { { commit_message: 'Awesome message', should_remove_source_branch: true } }
+ let(:params) { { commit_message: 'Awesome message', should_remove_source_branch: true, sha: merge_request.diff_head_sha } }
def process_merge_to_ref
perform_enqueued_jobs do
@@ -103,7 +103,7 @@ describe MergeRequests::MergeToRefService do
end
let(:merge_service) do
- MergeRequests::MergeService.new(project, user, {})
+ MergeRequests::MergeService.new(project, user, { sha: merge_request.diff_head_sha })
end
context 'when merge commit' do
@@ -205,7 +205,7 @@ describe MergeRequests::MergeToRefService do
end
context 'when target ref is passed as a parameter' do
- let(:params) { { commit_message: 'merge train', target_ref: target_ref } }
+ let(:params) { { commit_message: 'merge train', target_ref: target_ref, sha: merge_request.diff_head_sha } }
it_behaves_like 'successfully merges to ref with merge method' do
let(:first_parent_ref) { 'refs/heads/master' }
@@ -215,7 +215,7 @@ describe MergeRequests::MergeToRefService do
describe 'cascading merge refs' do
set(:project) { create(:project, :repository) }
- let(:params) { { commit_message: 'Cascading merge', first_parent_ref: first_parent_ref, target_ref: target_ref } }
+ let(:params) { { commit_message: 'Cascading merge', first_parent_ref: first_parent_ref, target_ref: target_ref, sha: merge_request.diff_head_sha } }
context 'when first merge happens' do
let(:merge_request) do
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index ff4cdd3e7e2..75b9c2304a6 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -46,7 +46,7 @@ describe MergeRequests::PushOptionsHandlerService do
expect(last_mr.assignees).to contain_exactly(user)
end
- context 'when project has been forked' do
+ context 'when project has been forked', :sidekiq_might_not_need_inline do
let(:forked_project) { fork_project(project, user, repository: true) }
let(:service) { described_class.new(forked_project, user, changes, push_options) }
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index 7b8c94c86fe..9c535664c26 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -174,7 +174,7 @@ describe MergeRequests::RebaseService do
target_branch: 'master', target_project: project)
end
- it 'rebases source branch' do
+ it 'rebases source branch', :sidekiq_might_not_need_inline do
parent_sha = forked_project.repository.commit(merge_request_from_fork.source_branch).parents.first.sha
target_branch_sha = project.repository.commit(merge_request_from_fork.target_branch).sha
expect(parent_sha).to eq(target_branch_sha)
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 2dc932c9f2c..9d0ad60a624 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -137,7 +137,7 @@ describe MergeRequests::RefreshService do
subject { service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/master') }
- it 'updates the head_pipeline_id for @merge_request' do
+ it 'updates the head_pipeline_id for @merge_request', :sidekiq_might_not_need_inline do
expect { subject }.to change { @merge_request.reload.head_pipeline_id }.from(nil).to(pipeline.id)
end
@@ -200,7 +200,7 @@ describe MergeRequests::RefreshService do
context 'when service runs on forked project' do
let(:project) { @fork_project }
- it 'creates legacy detached merge request pipeline for fork merge request' do
+ it 'creates legacy detached merge request pipeline for fork merge request', :sidekiq_might_not_need_inline do
expect { subject }
.to change { @fork_merge_request.pipelines_for_merge_request.count }.by(1)
@@ -232,7 +232,7 @@ describe MergeRequests::RefreshService do
subject
end
- it 'sets the latest detached merge request pipeline as a head pipeline' do
+ it 'sets the latest detached merge request pipeline as a head pipeline', :sidekiq_might_not_need_inline do
@merge_request.reload
expect(@merge_request.actual_head_pipeline).to be_merge_request_event
end
@@ -304,7 +304,7 @@ describe MergeRequests::RefreshService do
end
end
- context 'push to origin repo target branch' do
+ context 'push to origin repo target branch', :sidekiq_might_not_need_inline do
context 'when all MRs to the target branch had diffs' do
before do
service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
@@ -354,7 +354,7 @@ describe MergeRequests::RefreshService do
end
end
- context 'manual merge of source branch' do
+ context 'manual merge of source branch', :sidekiq_might_not_need_inline do
before do
# Merge master -> feature branch
@project.repository.merge(@user, @merge_request.diff_head_sha, @merge_request, 'Test message')
@@ -374,7 +374,7 @@ describe MergeRequests::RefreshService do
end
end
- context 'push to fork repo source branch' do
+ context 'push to fork repo source branch', :sidekiq_might_not_need_inline do
let(:refresh_service) { service.new(@fork_project, @user) }
def refresh
@@ -431,7 +431,7 @@ describe MergeRequests::RefreshService do
end
end
- context 'push to fork repo target branch' do
+ context 'push to fork repo target branch', :sidekiq_might_not_need_inline do
describe 'changes to merge requests' do
before do
service.new(@fork_project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
@@ -457,7 +457,7 @@ describe MergeRequests::RefreshService do
end
end
- context 'forked projects with the same source branch name as target branch' do
+ context 'forked projects with the same source branch name as target branch', :sidekiq_might_not_need_inline do
let!(:first_commit) do
@fork_project.repository.create_file(@user, 'test1.txt', 'Test data',
message: 'Test commit',
@@ -537,7 +537,7 @@ describe MergeRequests::RefreshService do
context 'push new branch that exists in a merge request' do
let(:refresh_service) { service.new(@fork_project, @user) }
- it 'refreshes the merge request' do
+ it 'refreshes the merge request', :sidekiq_might_not_need_inline do
expect(refresh_service).to receive(:execute_hooks)
.with(@fork_merge_request, 'update', old_rev: Gitlab::Git::BLANK_SHA)
allow_any_instance_of(Repository).to receive(:merge_base).and_return(@oldrev)
@@ -769,7 +769,7 @@ describe MergeRequests::RefreshService do
fork_project(target_project, author, repository: true)
end
- let_it_be(:merge_request) do
+ let_it_be(:merge_request, refind: true) do
create(:merge_request,
author: author,
source_project: source_project,
@@ -795,88 +795,58 @@ describe MergeRequests::RefreshService do
.parent_id
end
+ let(:auto_merge_strategy) { AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS }
let(:refresh_service) { service.new(project, user) }
before do
target_project.merge_method = merge_method
target_project.save!
+ merge_request.auto_merge_strategy = auto_merge_strategy
+ merge_request.save!
refresh_service.execute(oldrev, newrev, 'refs/heads/master')
merge_request.reload
end
- let(:aborted_message) do
- /aborted the automatic merge because target branch was updated/
- end
-
- shared_examples 'aborted MWPS' do
- it 'aborts auto_merge' do
- expect(merge_request.auto_merge_enabled?).to be_falsey
- expect(merge_request.notes.last.note).to match(aborted_message)
- end
-
- it 'removes merge_user' do
- expect(merge_request.merge_user).to be_nil
- end
-
- it 'does not add todos for merge user' do
- expect(user.todos.for_target(merge_request)).to be_empty
- end
-
- it 'adds todos for merge author' do
- expect(author.todos.for_target(merge_request)).to be_present.and be_all(&:pending?)
- end
- end
-
context 'when Project#merge_method is set to FF' do
let(:merge_method) { :ff }
- it_behaves_like 'aborted MWPS'
+ it_behaves_like 'aborted merge requests for MWPS'
context 'with forked project' do
let(:source_project) { forked_project }
- it_behaves_like 'aborted MWPS'
+ it_behaves_like 'aborted merge requests for MWPS'
+ end
+
+ context 'with bogus auto merge strategy' do
+ let(:auto_merge_strategy) { 'bogus' }
+
+ it_behaves_like 'maintained merge requests for MWPS'
end
end
context 'when Project#merge_method is set to rebase_merge' do
let(:merge_method) { :rebase_merge }
- it_behaves_like 'aborted MWPS'
+ it_behaves_like 'aborted merge requests for MWPS'
context 'with forked project' do
let(:source_project) { forked_project }
- it_behaves_like 'aborted MWPS'
+ it_behaves_like 'aborted merge requests for MWPS'
end
end
context 'when Project#merge_method is set to merge' do
let(:merge_method) { :merge }
- shared_examples 'maintained MWPS' do
- it 'does not cancel auto merge' do
- expect(merge_request.auto_merge_enabled?).to be_truthy
- expect(merge_request.notes).to be_empty
- end
-
- it 'does not change merge_user' do
- expect(merge_request.merge_user).to eq(user)
- end
-
- it 'does not add todos' do
- expect(author.todos.for_target(merge_request)).to be_empty
- expect(user.todos.for_target(merge_request)).to be_empty
- end
- end
-
- it_behaves_like 'maintained MWPS'
+ it_behaves_like 'maintained merge requests for MWPS'
context 'with forked project' do
let(:source_project) { forked_project }
- it_behaves_like 'maintained MWPS'
+ it_behaves_like 'maintained merge requests for MWPS'
end
end
end
diff --git a/spec/services/merge_requests/reopen_service_spec.rb b/spec/services/merge_requests/reopen_service_spec.rb
index 7a98437f724..25ab79d70c3 100644
--- a/spec/services/merge_requests/reopen_service_spec.rb
+++ b/spec/services/merge_requests/reopen_service_spec.rb
@@ -37,7 +37,7 @@ describe MergeRequests::ReopenService do
.with(merge_request, 'reopen')
end
- it 'sends email to user2 about reopen of merge_request' do
+ it 'sends email to user2 about reopen of merge_request', :sidekiq_might_not_need_inline do
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
expect(email.subject).to include(merge_request.title)
diff --git a/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb b/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb
index 0a10a9ee13b..dc2bd5bf3d0 100644
--- a/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb
+++ b/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb
@@ -38,7 +38,7 @@ describe MergeRequests::ResolvedDiscussionNotificationService do
subject.execute(merge_request)
end
- it "sends a notification email" do
+ it "sends a notification email", :sidekiq_might_not_need_inline do
expect_any_instance_of(NotificationService).to receive(:resolve_all_discussions).with(merge_request, user)
subject.execute(merge_request)
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index d31f5dc0176..baa0ecf27e3 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -98,7 +98,7 @@ describe MergeRequests::UpdateService, :mailer do
)
end
- it 'sends email to user2 about assign of new merge request and email to user3 about merge request unassignment' do
+ it 'sends email to user2 about assign of new merge request and email to user3 about merge request unassignment', :sidekiq_might_not_need_inline do
deliveries = ActionMailer::Base.deliveries
email = deliveries.last
recipients = deliveries.last(2).flat_map(&:to)
@@ -181,7 +181,7 @@ describe MergeRequests::UpdateService, :mailer do
end
end
- it 'merges the MR' do
+ it 'merges the MR', :sidekiq_might_not_need_inline do
expect(@merge_request).to be_valid
expect(@merge_request.state).to eq('merged')
expect(@merge_request.merge_error).to be_nil
@@ -190,7 +190,7 @@ describe MergeRequests::UpdateService, :mailer do
context 'with finished pipeline' do
before do
- create(:ci_pipeline_with_one_job,
+ create(:ci_pipeline,
project: project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha,
@@ -202,7 +202,7 @@ describe MergeRequests::UpdateService, :mailer do
end
end
- it 'merges the MR' do
+ it 'merges the MR', :sidekiq_might_not_need_inline do
expect(@merge_request).to be_valid
expect(@merge_request.state).to eq('merged')
end
@@ -212,14 +212,14 @@ describe MergeRequests::UpdateService, :mailer do
before do
service_mock = double
create(
- :ci_pipeline_with_one_job,
+ :ci_pipeline,
project: project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha,
head_pipeline_of: merge_request
)
- expect(AutoMerge::MergeWhenPipelineSucceedsService).to receive(:new).with(project, user, {})
+ expect(AutoMerge::MergeWhenPipelineSucceedsService).to receive(:new).with(project, user, { sha: merge_request.diff_head_sha })
.and_return(service_mock)
allow(service_mock).to receive(:available_for?) { true }
expect(service_mock).to receive(:execute).with(merge_request)
@@ -332,7 +332,7 @@ describe MergeRequests::UpdateService, :mailer do
it_behaves_like 'system notes for milestones'
- it 'sends notifications for subscribers of changed milestone' do
+ it 'sends notifications for subscribers of changed milestone', :sidekiq_might_not_need_inline do
merge_request.milestone = create(:milestone, project: project)
merge_request.save
@@ -364,7 +364,7 @@ describe MergeRequests::UpdateService, :mailer do
it_behaves_like 'system notes for milestones'
- it 'sends notifications for subscribers of changed milestone' do
+ it 'sends notifications for subscribers of changed milestone', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
update_merge_request(milestone: create(:milestone, project: project))
end
@@ -411,7 +411,7 @@ describe MergeRequests::UpdateService, :mailer do
context 'when auto merge is enabled and target branch changed' do
before do
- AutoMergeService.new(project, user).execute(merge_request, AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
+ AutoMergeService.new(project, user, { sha: merge_request.diff_head_sha }).execute(merge_request, AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
update_merge_request({ target_branch: 'target' })
end
@@ -431,7 +431,7 @@ describe MergeRequests::UpdateService, :mailer do
project.add_developer(subscriber)
end
- it 'sends notifications for subscribers of newly added labels' do
+ it 'sends notifications for subscribers of newly added labels', :sidekiq_might_not_need_inline do
opts = { label_ids: [label.id] }
perform_enqueued_jobs do
diff --git a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
new file mode 100644
index 00000000000..f200c636aac
--- /dev/null
+++ b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
@@ -0,0 +1,177 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::Dashboard::GrafanaMetricEmbedService do
+ include MetricsDashboardHelpers
+ include ReactiveCachingHelpers
+ include GrafanaApiHelpers
+
+ let_it_be(:project) { build(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:grafana_integration) { create(:grafana_integration, project: project) }
+
+ let(:grafana_url) do
+ valid_grafana_dashboard_link(grafana_integration.grafana_url)
+ end
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe '.valid_params?' do
+ let(:valid_params) { { embedded: true, grafana_url: grafana_url } }
+
+ subject { described_class.valid_params?(params) }
+
+ let(:params) { valid_params }
+
+ it { is_expected.to be_truthy }
+
+ context 'not embedded' do
+ let(:params) { valid_params.except(:embedded) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'undefined grafana_url' do
+ let(:params) { valid_params.except(:grafana_url) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '.from_cache' do
+ let(:params) { [project.id, user.id, grafana_url] }
+
+ subject { described_class.from_cache(*params) }
+
+ it 'initializes an instance of GrafanaMetricEmbedService' do
+ expect(subject).to be_an_instance_of(described_class)
+ expect(subject.project).to eq(project)
+ expect(subject.current_user).to eq(user)
+ expect(subject.params[:grafana_url]).to eq(grafana_url)
+ end
+ end
+
+ describe '#get_dashboard', :use_clean_rails_memory_store_caching do
+ let(:service_params) do
+ [
+ project,
+ user,
+ {
+ embedded: true,
+ grafana_url: grafana_url
+ }
+ ]
+ end
+
+ let(:service) { described_class.new(*service_params) }
+ let(:service_call) { service.get_dashboard }
+
+ context 'without caching' do
+ before do
+ synchronous_reactive_cache(service)
+ end
+
+ it_behaves_like 'raises error for users with insufficient permissions'
+
+ context 'without a grafana integration' do
+ before do
+ allow(project).to receive(:grafana_integration).and_return(nil)
+ end
+
+ it_behaves_like 'misconfigured dashboard service response', :bad_request
+ end
+
+ context 'when grafana cannot be reached' do
+ before do
+ allow(grafana_integration.client).to receive(:get_dashboard).and_raise(::Grafana::Client::Error)
+ end
+
+ it_behaves_like 'misconfigured dashboard service response', :service_unavailable
+ end
+
+ context 'when panelId is missing' do
+ let(:grafana_url) do
+ grafana_integration.grafana_url +
+ '/d/XDaNK6amz/gitlab-omnibus-redis' \
+ '?from=1570397739557&to=1570484139557'
+ end
+
+ before do
+ stub_dashboard_request(grafana_integration.grafana_url)
+ end
+
+ it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
+ end
+
+ context 'when uid is missing' do
+ let(:grafana_url) { grafana_integration.grafana_url + '/d/' }
+
+ before do
+ stub_dashboard_request(grafana_integration.grafana_url)
+ end
+
+ it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
+ end
+
+ context 'when the dashboard response contains misconfigured json' do
+ before do
+ stub_dashboard_request(grafana_integration.grafana_url, body: '')
+ end
+
+ it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
+ end
+
+ context 'when the datasource response contains misconfigured json' do
+ before do
+ stub_dashboard_request(grafana_integration.grafana_url)
+ stub_datasource_request(grafana_integration.grafana_url, body: '')
+ end
+
+ it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
+ end
+
+ context 'when the embed was created successfully' do
+ before do
+ stub_dashboard_request(grafana_integration.grafana_url)
+ stub_datasource_request(grafana_integration.grafana_url)
+ end
+
+ it_behaves_like 'valid embedded dashboard service response'
+ end
+ end
+
+ context 'with caching', :use_clean_rails_memory_store_caching do
+ let(:cache_params) { [project.id, user.id, grafana_url] }
+
+ context 'when value not present in cache' do
+ it 'returns nil' do
+ expect(ReactiveCachingWorker)
+ .to receive(:perform_async)
+ .with(service.class, service.id, *cache_params)
+
+ expect(service_call).to eq(nil)
+ end
+ end
+
+ context 'when value present in cache' do
+ let(:return_value) { { 'http_status' => :ok, 'dashboard' => '{}' } }
+
+ before do
+ stub_reactive_cache(service, return_value, cache_params)
+ end
+
+ it 'returns cached value' do
+ expect(ReactiveCachingWorker)
+ .not_to receive(:perform_async)
+ .with(service.class, service.id, *cache_params)
+
+ expect(service_call[:http_status]).to eq(return_value[:http_status])
+ expect(service_call[:dashboard]).to eq(return_value[:dashboard])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/metrics/dashboard/project_dashboard_service_spec.rb b/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
index e76db868425..ab7a7b97861 100644
--- a/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
@@ -80,7 +80,8 @@ describe Metrics::Dashboard::ProjectDashboardService, :use_clean_rails_memory_st
[{
path: dashboard_path,
display_name: 'test.yml',
- default: false
+ default: false,
+ system_dashboard: false
}]
)
end
diff --git a/spec/services/metrics/dashboard/system_dashboard_service_spec.rb b/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
index 8be3e7f6064..ec861465662 100644
--- a/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
@@ -44,7 +44,8 @@ describe Metrics::Dashboard::SystemDashboardService, :use_clean_rails_memory_sto
[{
path: described_class::SYSTEM_DASHBOARD_PATH,
display_name: described_class::SYSTEM_DASHBOARD_NAME,
- default: true
+ default: true,
+ system_dashboard: true
}]
)
end
diff --git a/spec/services/namespaces/statistics_refresher_service_spec.rb b/spec/services/namespaces/statistics_refresher_service_spec.rb
index f4d9c96f7f4..9d42e917efe 100644
--- a/spec/services/namespaces/statistics_refresher_service_spec.rb
+++ b/spec/services/namespaces/statistics_refresher_service_spec.rb
@@ -23,7 +23,7 @@ describe Namespaces::StatisticsRefresherService, '#execute' do
end
end
- context 'with a root storage statistics relation' do
+ context 'with a root storage statistics relation', :sidekiq_might_not_need_inline do
before do
Namespace::AggregationSchedule.safe_find_or_create_by!(namespace_id: group.id)
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index aa67b87a645..25900043f11 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -682,7 +682,7 @@ describe NotificationService, :mailer do
context 'when recipients for a new release exist' do
let(:release) { create(:release) }
- it 'calls new_release_email for each relevant recipient' do
+ it 'calls new_release_email for each relevant recipient', :sidekiq_might_not_need_inline do
user_1 = create(:user)
user_2 = create(:user)
user_3 = create(:user)
@@ -869,6 +869,18 @@ describe NotificationService, :mailer do
should_email(user_4)
end
+ it 'adds "subscribed" reason to subscriber emails' do
+ user_1 = create(:user)
+ label = create(:label, project: project, issues: [issue])
+ issue.reload
+ label.subscribe(user_1)
+
+ notification.new_issue(issue, @u_disabled)
+
+ email = find_email_for(user_1)
+ expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::SUBSCRIBED)
+ end
+
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.new_issue(issue, @u_disabled) }
@@ -1272,6 +1284,17 @@ describe NotificationService, :mailer do
let(:notification_target) { issue }
let(:notification_trigger) { notification.close_issue(issue, @u_disabled) }
end
+
+ it 'adds "subscribed" reason to subscriber emails' do
+ user_1 = create(:user)
+ issue.subscribe(user_1)
+ issue.reload
+
+ notification.close_issue(issue, @u_disabled)
+
+ email = find_email_for(user_1)
+ expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::SUBSCRIBED)
+ end
end
describe '#reopen_issue' do
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index 8585d495ffb..bf637b70aaf 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -222,7 +222,7 @@ describe Projects::AfterRenameService do
def expect_repository_exist(full_path_with_extension)
expect(
- gitlab_shell.exists?(
+ TestEnv.storage_dir_exists?(
project.repository_storage,
full_path_with_extension
)
diff --git a/spec/services/projects/container_repository/delete_tags_service_spec.rb b/spec/services/projects/container_repository/delete_tags_service_spec.rb
index f296ef3a776..1cfe3582e56 100644
--- a/spec/services/projects/container_repository/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/delete_tags_service_spec.rb
@@ -57,21 +57,7 @@ describe Projects::ContainerRepository::DeleteTagsService do
end
end
- context 'with dummy tags disabled' do
- let(:tags) { %w[A Ba] }
-
- before do
- stub_feature_flags(container_registry_smart_delete: false)
- end
-
- it 'deletes tags one by one' do
- expect_delete_tag('sha256:configA')
- expect_delete_tag('sha256:configB')
- is_expected.to include(status: :success)
- end
- end
-
- context 'with dummy tags enabled' do
+ context 'with tags to delete' do
let(:tags) { %w[A Ba] }
it 'deletes the tags using a dummy image' do
@@ -102,6 +88,33 @@ describe Projects::ContainerRepository::DeleteTagsService do
is_expected.to include(status: :success)
end
+
+ context 'with failures' do
+ context 'when the dummy manifest generation fails' do
+ before do
+ stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3', success: false)
+ end
+
+ it { is_expected.to include(status: :error) }
+ end
+
+ context 'when updating the tags fails' do
+ before do
+ stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
+
+ stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/A")
+ .to_return(status: 500, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+
+ stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/Ba")
+ .to_return(status: 500, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3")
+ .to_return(status: 200, body: "", headers: {})
+ end
+
+ it { is_expected.to include(status: :error) }
+ end
+ end
end
end
end
@@ -121,10 +134,10 @@ describe Projects::ContainerRepository::DeleteTagsService do
end
end
- def stub_upload(content, digest)
+ def stub_upload(content, digest, success: true)
expect_any_instance_of(ContainerRegistry::Client)
.to receive(:upload_blob)
- .with(repository.path, content, digest) { double(success?: true ) }
+ .with(repository.path, content, digest) { double(success?: success ) }
end
def expect_delete_tag(digest)
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 2331281bd8e..642986bb176 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -81,7 +81,7 @@ describe Projects::DestroyService do
end
let!(:async) { true }
- it 'destroys them' do
+ it 'destroys them', :sidekiq_might_not_need_inline do
expect(RemoteMirror.count).to eq(0)
end
end
@@ -102,7 +102,7 @@ describe Projects::DestroyService do
end
let!(:async) { true }
- it 'destroys project and export' do
+ it 'destroys project and export', :sidekiq_might_not_need_inline do
expect { destroy_project(project_with_export, user) }.to change(ImportExportUpload, :count).by(-1)
expect(Project.all).not_to include(project_with_export)
@@ -153,7 +153,7 @@ describe Projects::DestroyService do
end
end
- context 'with async_execute' do
+ context 'with async_execute', :sidekiq_might_not_need_inline do
let(:async) { true }
context 'async delete of project with private issue visibility' do
@@ -346,21 +346,21 @@ describe Projects::DestroyService do
let(:path) { project.disk_path + '.git' }
before do
- expect(project.gitlab_shell.exists?(project.repository_storage, path)).to be_truthy
- expect(project.gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, path)).to be_truthy
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_falsey
# Dont run sidekiq to check if renamed repository exists
Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
- expect(project.gitlab_shell.exists?(project.repository_storage, path)).to be_falsey
- expect(project.gitlab_shell.exists?(project.repository_storage, remove_path)).to be_truthy
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, path)).to be_falsey
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_truthy
end
it 'restores the repositories' do
Sidekiq::Testing.fake! { described_class.new(project, user).attempt_repositories_rollback }
- expect(project.gitlab_shell.exists?(project.repository_storage, path)).to be_truthy
- expect(project.gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, path)).to be_truthy
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_falsey
end
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index 7e7e80ca240..5a3796fec3d 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -79,7 +79,7 @@ describe Projects::ForkService do
expect(fork_network.projects).to contain_exactly(@from_project, to_project)
end
- it 'imports the repository of the forked project' do
+ it 'imports the repository of the forked project', :sidekiq_might_not_need_inline do
to_project = fork_project(@from_project, @to_user, repository: true)
expect(to_project.empty_repo?).to be_falsy
diff --git a/spec/services/projects/hashed_storage/base_attachment_service_spec.rb b/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
new file mode 100644
index 00000000000..34c37be6703
--- /dev/null
+++ b/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::HashedStorage::BaseAttachmentService do
+ let(:project) { create(:project, :repository, storage_version: 0, skip_disk_validation: true) }
+
+ subject(:service) { described_class.new(project: project, old_disk_path: project.full_path, logger: nil) }
+
+ describe '#old_disk_path' do
+ it { is_expected.to respond_to :old_disk_path }
+ end
+
+ describe '#new_disk_path' do
+ it { is_expected.to respond_to :new_disk_path }
+ end
+
+ describe '#skipped?' do
+ it { is_expected.to respond_to :skipped? }
+ end
+
+ describe '#target_path_discardable?' do
+ it 'returns false' do
+ expect(subject.target_path_discardable?('something/something')).to be_falsey
+ end
+ end
+
+ describe '#discard_path!' do
+ it 'renames target path adding a timestamp at the end' do
+ target_path = Dir.mktmpdir
+ expect(Dir.exist?(target_path)).to be_truthy
+
+ Timecop.freeze do
+ suffix = Time.now.utc.to_i
+ subject.send(:discard_path!, target_path)
+
+ expected_renamed_path = "#{target_path}-#{suffix}"
+
+ expect(Dir.exist?(target_path)).to be_falsey
+ expect(Dir.exist?(expected_renamed_path)).to be_truthy
+ end
+ end
+ end
+
+ describe '#move_folder!' do
+ context 'when old_path is not a directory' do
+ it 'adds information to the logger and returns true' do
+ Tempfile.create do |old_path|
+ new_path = "#{old_path}-new"
+
+ expect(subject.send(:move_folder!, old_path, new_path)).to be_truthy
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
index 32ebec318f2..ab9d2bdba8f 100644
--- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Projects::HashedStorage::MigrateAttachmentsService do
- subject(:service) { described_class.new(project, project.full_path, logger: nil) }
+ subject(:service) { described_class.new(project: project, old_disk_path: project.full_path, logger: nil) }
let(:project) { create(:project, :repository, storage_version: 1, skip_disk_validation: true) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
@@ -72,7 +72,23 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
FileUtils.mkdir_p(base_path(hashed_storage))
end
- it 'raises AttachmentCannotMoveError' do
+ it 'succeed when target is empty' do
+ expect { service.execute }.not_to raise_error
+ end
+
+ it 'succeed when target include only discardable items' do
+ Projects::HashedStorage::MigrateAttachmentsService::DISCARDABLE_PATHS.each do |path_fragment|
+ discardable_path = File.join(base_path(hashed_storage), path_fragment)
+ FileUtils.mkdir_p(discardable_path)
+ end
+
+ expect { service.execute }.not_to raise_error
+ end
+
+ it 'raises AttachmentCannotMoveError when there are non discardable items on target path' do
+ not_discardable_path = File.join(base_path(hashed_storage), 'something')
+ FileUtils.mkdir_p(not_discardable_path)
+
expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentCannotMoveError)
@@ -100,6 +116,18 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
end
end
+ context '#target_path_discardable?' do
+ it 'returns true when it include only items on the discardable list' do
+ hashed_attachments_path = File.join(base_path(hashed_storage))
+ Projects::HashedStorage::MigrateAttachmentsService::DISCARDABLE_PATHS.each do |path_fragment|
+ discardable_path = File.join(hashed_attachments_path, path_fragment)
+ FileUtils.mkdir_p(discardable_path)
+ end
+
+ expect(service.target_path_discardable?(hashed_attachments_path)).to be_truthy
+ end
+ end
+
def base_path(storage)
File.join(FileUploader.root, storage.disk_path)
end
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
index 70785c606a5..132b895fc35 100644
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -10,7 +10,7 @@ describe Projects::HashedStorage::MigrateRepositoryService do
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
- subject(:service) { described_class.new(project, project.disk_path) }
+ subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path) }
describe '#execute' do
let(:old_disk_path) { legacy_storage.disk_path }
diff --git a/spec/services/projects/hashed_storage/migration_service_spec.rb b/spec/services/projects/hashed_storage/migration_service_spec.rb
index e3191cd7ebc..f3ac26e7761 100644
--- a/spec/services/projects/hashed_storage/migration_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migration_service_spec.rb
@@ -10,13 +10,14 @@ describe Projects::HashedStorage::MigrationService do
describe '#execute' do
context 'repository migration' do
- let(:repository_service) { Projects::HashedStorage::MigrateRepositoryService.new(project, project.full_path, logger: logger) }
+ let(:repository_service) do
+ Projects::HashedStorage::MigrateRepositoryService.new(project: project,
+ old_disk_path: project.full_path,
+ logger: logger)
+ end
it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do
- expect(Projects::HashedStorage::MigrateRepositoryService)
- .to receive(:new)
- .with(project, project.full_path, logger: logger)
- .and_return(repository_service)
+ expect(service).to receive(:migrate_repository_service).and_return(repository_service)
expect(repository_service).to receive(:execute)
service.execute
@@ -31,13 +32,14 @@ describe Projects::HashedStorage::MigrationService do
end
context 'attachments migration' do
- let(:attachments_service) { Projects::HashedStorage::MigrateAttachmentsService.new(project, project.full_path, logger: logger) }
+ let(:attachments_service) do
+ Projects::HashedStorage::MigrateAttachmentsService.new(project: project,
+ old_disk_path: project.full_path,
+ logger: logger)
+ end
it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do
- expect(Projects::HashedStorage::MigrateAttachmentsService)
- .to receive(:new)
- .with(project, project.full_path, logger: logger)
- .and_return(attachments_service)
+ expect(service).to receive(:migrate_attachments_service).and_return(attachments_service)
expect(attachments_service).to receive(:execute)
service.execute
diff --git a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb b/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
index 815c85e0866..c2ba9626f41 100644
--- a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Projects::HashedStorage::RollbackAttachmentsService do
- subject(:service) { described_class.new(project, logger: nil) }
+ subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path, logger: nil) }
let(:project) { create(:project, :repository, skip_disk_validation: true) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
diff --git a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
index 3ca9ee5bee5..97c7c0af946 100644
--- a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
@@ -10,7 +10,7 @@ describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
- subject(:service) { described_class.new(project, project.disk_path) }
+ subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path) }
describe '#execute' do
let(:old_disk_path) { hashed_storage.disk_path }
diff --git a/spec/services/projects/hashed_storage/rollback_service_spec.rb b/spec/services/projects/hashed_storage/rollback_service_spec.rb
index 427d1535559..48d4eac9eb7 100644
--- a/spec/services/projects/hashed_storage/rollback_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_service_spec.rb
@@ -6,17 +6,15 @@ describe Projects::HashedStorage::RollbackService do
let(:project) { create(:project, :empty_repo, :wiki_repo) }
let(:logger) { double }
- subject(:service) { described_class.new(project, project.full_path, logger: logger) }
+ subject(:service) { described_class.new(project, project.disk_path, logger: logger) }
describe '#execute' do
context 'attachments rollback' do
let(:attachments_service_class) { Projects::HashedStorage::RollbackAttachmentsService }
- let(:attachments_service) { attachments_service_class.new(project, logger: logger) }
+ let(:attachments_service) { attachments_service_class.new(project: project, old_disk_path: project.disk_path, logger: logger) }
it 'delegates rollback to Projects::HashedStorage::RollbackAttachmentsService' do
- expect(attachments_service_class).to receive(:new)
- .with(project, logger: logger)
- .and_return(attachments_service)
+ expect(service).to receive(:rollback_attachments_service).and_return(attachments_service)
expect(attachments_service).to receive(:execute)
service.execute
@@ -31,15 +29,12 @@ describe Projects::HashedStorage::RollbackService do
end
context 'repository rollback' do
+ let(:project) { create(:project, :empty_repo, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
let(:repository_service_class) { Projects::HashedStorage::RollbackRepositoryService }
- let(:repository_service) { repository_service_class.new(project, project.full_path, logger: logger) }
+ let(:repository_service) { repository_service_class.new(project: project, old_disk_path: project.disk_path, logger: logger) }
it 'delegates rollback to RollbackRepositoryService' do
- project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:repository]
-
- expect(repository_service_class).to receive(:new)
- .with(project, project.full_path, logger: logger)
- .and_return(repository_service)
+ expect(service).to receive(:rollback_repository_service).and_return(repository_service)
expect(repository_service).to receive(:execute)
service.execute
diff --git a/spec/services/projects/import_export/export_service_spec.rb b/spec/services/projects/import_export/export_service_spec.rb
index 146d656c909..a557e61da78 100644
--- a/spec/services/projects/import_export/export_service_spec.rb
+++ b/spec/services/projects/import_export/export_service_spec.rb
@@ -66,7 +66,7 @@ describe Projects::ImportExport::ExportService do
end
it 'saves the project in the file system' do
- expect(Gitlab::ImportExport::Saver).to receive(:save).with(project: project, shared: shared)
+ expect(Gitlab::ImportExport::Saver).to receive(:save).with(exportable: project, shared: shared)
service.execute
end
diff --git a/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb
index 66233787d3a..aca59079b3c 100644
--- a/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb
@@ -16,6 +16,13 @@ describe Projects::LfsPointers::LfsLinkService do
end
describe '#execute' do
+ it 'raises an error when trying to link too many objects at once' do
+ oids = Array.new(described_class::MAX_OIDS) { |i| "oid-#{i}" }
+ oids << 'the straw'
+
+ expect { subject.execute(oids) }.to raise_error(described_class::TooManyOidsError)
+ end
+
it 'links existing lfs objects to the project' do
expect(project.all_lfs_objects.count).to eq 2
@@ -28,7 +35,7 @@ describe Projects::LfsPointers::LfsLinkService do
it 'returns linked oids' do
linked = lfs_objects_project.map(&:lfs_object).map(&:oid) << new_lfs_object.oid
- expect(subject.execute(new_oid_list.keys)).to eq linked
+ expect(subject.execute(new_oid_list.keys)).to contain_exactly(*linked)
end
it 'links in batches' do
@@ -48,5 +55,26 @@ describe Projects::LfsPointers::LfsLinkService do
expect(project.all_lfs_objects.count).to eq 9
expect(linked.size).to eq 7
end
+
+ it 'only queries for the batch that will be processed', :aggregate_failures do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ oids = %w(one two)
+
+ expect(LfsObject).to receive(:where).with(oid: %w(one)).once.and_call_original
+ expect(LfsObject).to receive(:where).with(oid: %w(two)).once.and_call_original
+
+ subject.execute(oids)
+ end
+
+ it 'only queries 3 times' do
+ # make sure that we don't count the queries in the setup
+ new_oid_list
+
+ # These are repeated for each batch of oids: maximum (MAX_OIDS / BATCH_SIZE) times
+ # 1. Load the batch of lfs object ids that we might know already
+ # 2. Load the objects that have not been linked to the project yet
+ # 3. Insert the lfs_objects_projects for that batch
+ expect { subject.execute(new_oid_list.keys) }.not_to exceed_query_limit(3)
+ end
end
end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 31bd0f0f836..c848a5397e1 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -151,7 +151,7 @@ describe Projects::UpdateService do
context 'when we update project but not enabling a wiki' do
it 'does not try to create an empty wiki' do
- Gitlab::Shell.new.rm_directory(project.repository_storage, project.wiki.path)
+ TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
result = update_project(project, user, { name: 'test1' })
@@ -172,7 +172,7 @@ describe Projects::UpdateService do
context 'when enabling a wiki' do
it 'creates a wiki' do
project.project_feature.update(wiki_access_level: ProjectFeature::DISABLED)
- Gitlab::Shell.new.rm_directory(project.repository_storage, project.wiki.path)
+ TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
result = update_project(project, user, project_feature_attributes: { wiki_access_level: ProjectFeature::ENABLED })
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 83101add724..e2ed7581ad4 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -115,51 +115,36 @@ describe SystemNoteService do
end
describe '.merge_when_pipeline_succeeds' do
- let(:pipeline) { build(:ci_pipeline_without_jobs )}
- let(:noteable) do
- create(:merge_request, source_project: project, target_project: project)
- end
-
- subject { described_class.merge_when_pipeline_succeeds(noteable, project, author, pipeline.sha) }
+ it 'calls MergeRequestsService' do
+ sha = double
- it_behaves_like 'a system note' do
- let(:action) { 'merge' }
- end
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:merge_when_pipeline_succeeds).with(sha)
+ end
- it "posts the 'merge when pipeline succeeds' system note" do
- expect(subject.note).to match(%r{enabled an automatic merge when the pipeline for (\w+/\w+@)?\h{40} succeeds})
+ described_class.merge_when_pipeline_succeeds(noteable, project, author, sha)
end
end
describe '.cancel_merge_when_pipeline_succeeds' do
- let(:noteable) do
- create(:merge_request, source_project: project, target_project: project)
- end
-
- subject { described_class.cancel_merge_when_pipeline_succeeds(noteable, project, author) }
-
- it_behaves_like 'a system note' do
- let(:action) { 'merge' }
- end
+ it 'calls MergeRequestsService' do
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:cancel_merge_when_pipeline_succeeds)
+ end
- it "posts the 'merge when pipeline succeeds' system note" do
- expect(subject.note).to eq "canceled the automatic merge"
+ described_class.cancel_merge_when_pipeline_succeeds(noteable, project, author)
end
end
describe '.abort_merge_when_pipeline_succeeds' do
- let(:noteable) do
- create(:merge_request, source_project: project, target_project: project)
- end
+ it 'calls MergeRequestsService' do
+ reason = double
- subject { described_class.abort_merge_when_pipeline_succeeds(noteable, project, author, 'merge request was closed') }
-
- it_behaves_like 'a system note' do
- let(:action) { 'merge' }
- end
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:abort_merge_when_pipeline_succeeds).with(reason)
+ end
- it "posts the 'merge when pipeline succeeds' system note" do
- expect(subject.note).to eq "aborted the automatic merge because merge request was closed"
+ described_class.abort_merge_when_pipeline_succeeds(noteable, project, author, reason)
end
end
@@ -196,77 +181,55 @@ describe SystemNoteService do
end
describe '.change_branch' do
- subject { described_class.change_branch(noteable, project, author, 'target', old_branch, new_branch) }
-
- let(:old_branch) { 'old_branch'}
- let(:new_branch) { 'new_branch'}
-
- it_behaves_like 'a system note' do
- let(:action) { 'branch' }
- end
+ it 'calls MergeRequestsService' do
+ old_branch = double
+ new_branch = double
+ branch_type = double
- context 'when target branch name changed' do
- it 'sets the note text' do
- expect(subject.note).to eq "changed target branch from `#{old_branch}` to `#{new_branch}`"
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:change_branch).with(branch_type, old_branch, new_branch)
end
+
+ described_class.change_branch(noteable, project, author, branch_type, old_branch, new_branch)
end
end
describe '.change_branch_presence' do
- subject { described_class.change_branch_presence(noteable, project, author, :source, 'feature', :delete) }
-
- it_behaves_like 'a system note' do
- let(:action) { 'branch' }
- end
+ it 'calls MergeRequestsService' do
+ presence = double
+ branch = double
+ branch_type = double
- context 'when source branch deleted' do
- it 'sets the note text' do
- expect(subject.note).to eq "deleted source branch `feature`"
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:change_branch_presence).with(branch_type, branch, presence)
end
+
+ described_class.change_branch_presence(noteable, project, author, branch_type, branch, presence)
end
end
describe '.new_issue_branch' do
- let(:branch) { '1-mepmep' }
+ it 'calls MergeRequestsService' do
+ branch = double
+ branch_project = double
- subject { described_class.new_issue_branch(noteable, project, author, branch, branch_project: branch_project) }
-
- shared_examples_for 'a system note for new issue branch' do
- it_behaves_like 'a system note' do
- let(:action) { 'branch' }
- end
-
- context 'when a branch is created from the new branch button' do
- it 'sets the note text' do
- expect(subject.note).to start_with("created branch [`#{branch}`]")
- end
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:new_issue_branch).with(branch, branch_project: branch_project)
end
- end
- context 'branch_project is set' do
- let(:branch_project) { create(:project, :repository) }
-
- it_behaves_like 'a system note for new issue branch'
- end
-
- context 'branch_project is not set' do
- let(:branch_project) { nil }
-
- it_behaves_like 'a system note for new issue branch'
+ described_class.new_issue_branch(noteable, project, author, branch, branch_project: branch_project)
end
end
describe '.new_merge_request' do
- subject { described_class.new_merge_request(noteable, project, author, merge_request) }
-
- let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ it 'calls MergeRequestsService' do
+ merge_request = double
- it_behaves_like 'a system note' do
- let(:action) { 'merge' }
- end
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:new_merge_request).with(merge_request)
+ end
- it 'sets the new merge request note text' do
- expect(subject.note).to eq("created merge request #{merge_request.to_reference(project)} to address this issue")
+ described_class.new_merge_request(noteable, project, author, merge_request)
end
end
@@ -642,57 +605,24 @@ describe SystemNoteService do
end
describe '.handle_merge_request_wip' do
- context 'adding wip note' do
- let(:noteable) { create(:merge_request, source_project: project, title: 'WIP Lorem ipsum') }
-
- subject { described_class.handle_merge_request_wip(noteable, project, author) }
-
- it_behaves_like 'a system note' do
- let(:action) { 'title' }
+ it 'calls MergeRequestsService' do
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:handle_merge_request_wip)
end
- it 'sets the note text' do
- expect(subject.note).to eq 'marked as a **Work In Progress**'
- end
- end
-
- context 'removing wip note' do
- let(:noteable) { create(:merge_request, source_project: project, title: 'Lorem ipsum') }
-
- subject { described_class.handle_merge_request_wip(noteable, project, author) }
-
- it_behaves_like 'a system note' do
- let(:action) { 'title' }
- end
-
- it 'sets the note text' do
- expect(subject.note).to eq 'unmarked as a **Work In Progress**'
- end
+ described_class.handle_merge_request_wip(noteable, project, author)
end
end
describe '.add_merge_request_wip_from_commit' do
- let(:noteable) do
- create(:merge_request, source_project: project, target_project: project)
- end
-
- subject do
- described_class.add_merge_request_wip_from_commit(
- noteable,
- project,
- author,
- noteable.diff_head_commit
- )
- end
+ it 'calls MergeRequestsService' do
+ commit = double
- it_behaves_like 'a system note' do
- let(:action) { 'title' }
- end
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:add_merge_request_wip_from_commit).with(commit)
+ end
- it "posts the 'marked as a Work In Progress from commit' system note" do
- expect(subject.note).to match(
- /marked as a \*\*Work In Progress\*\* from #{Commit.reference_pattern}/
- )
+ described_class.add_merge_request_wip_from_commit(noteable, project, author, commit)
end
end
@@ -709,75 +639,25 @@ describe SystemNoteService do
end
describe '.resolve_all_discussions' do
- let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
-
- subject { described_class.resolve_all_discussions(noteable, project, author) }
-
- it_behaves_like 'a system note' do
- let(:action) { 'discussion' }
- end
+ it 'calls MergeRequestsService' do
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:resolve_all_discussions)
+ end
- it 'sets the note text' do
- expect(subject.note).to eq 'resolved all threads'
+ described_class.resolve_all_discussions(noteable, project, author)
end
end
describe '.diff_discussion_outdated' do
- let(:discussion) { create(:diff_note_on_merge_request, project: project).to_discussion }
- let(:merge_request) { discussion.noteable }
- let(:change_position) { discussion.position }
+ it 'calls MergeRequestsService' do
+ discussion = double
+ change_position = double
- def reloaded_merge_request
- MergeRequest.find(merge_request.id)
- end
-
- subject { described_class.diff_discussion_outdated(discussion, project, author, change_position) }
-
- it_behaves_like 'a system note' do
- let(:expected_noteable) { discussion.first_note.noteable }
- let(:action) { 'outdated' }
- end
-
- context 'when the change_position is valid for the discussion' do
- it 'creates a new note in the discussion' do
- # we need to completely rebuild the merge request object, or the `@discussions` on the merge request are not reloaded.
- expect { subject }.to change { reloaded_merge_request.discussions.first.notes.size }.by(1)
- end
-
- it 'links to the diff in the system note' do
- diff_id = merge_request.merge_request_diff.id
- line_code = change_position.line_code(project.repository)
- link = diffs_project_merge_request_path(project, merge_request, diff_id: diff_id, anchor: line_code)
-
- expect(subject.note).to eq("changed this line in [version 1 of the diff](#{link})")
+ expect_next_instance_of(::SystemNotes::MergeRequestsService) do |service|
+ expect(service).to receive(:diff_discussion_outdated).with(discussion, change_position)
end
- context 'discussion is on an image' do
- let(:discussion) { create(:image_diff_note_on_merge_request, project: project).to_discussion }
-
- it 'links to the diff in the system note' do
- diff_id = merge_request.merge_request_diff.id
- file_hash = change_position.file_hash
- link = diffs_project_merge_request_path(project, merge_request, diff_id: diff_id, anchor: file_hash)
-
- expect(subject.note).to eq("changed this file in [version 1 of the diff](#{link})")
- end
- end
- end
-
- context 'when the change_position does not point to a valid version' do
- before do
- allow(merge_request).to receive(:version_params_for).and_return(nil)
- end
-
- it 'creates a new note in the discussion' do
- # we need to completely rebuild the merge request object, or the `@discussions` on the merge request are not reloaded.
- expect { subject }.to change { reloaded_merge_request.discussions.first.notes.size }.by(1)
- end
-
- it 'does not create a link' do
- expect(subject.note).to eq('changed this line in version 1 of the diff')
- end
+ described_class.diff_discussion_outdated(discussion, project, author, change_position)
end
end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 5023abad4cd..ba484d95c9c 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -395,7 +395,7 @@ describe ::SystemNotes::IssuablesService do
end
end
- context 'commit with cross-reference from fork' do
+ context 'commit with cross-reference from fork', :sidekiq_might_not_need_inline do
let(:author2) { create(:project_member, :reporter, user: create(:user), project: project).user }
let(:forked_project) { fork_project(project, author2, repository: true) }
let(:commit2) { forked_project.commit }
diff --git a/spec/services/system_notes/merge_requests_service_spec.rb b/spec/services/system_notes/merge_requests_service_spec.rb
new file mode 100644
index 00000000000..6d2473e8c03
--- /dev/null
+++ b/spec/services/system_notes/merge_requests_service_spec.rb
@@ -0,0 +1,243 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::SystemNotes::MergeRequestsService do
+ include Gitlab::Routing
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:author) { create(:user) }
+
+ let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
+
+ let(:service) { described_class.new(noteable: noteable, project: project, author: author) }
+
+ describe '.merge_when_pipeline_succeeds' do
+ let(:pipeline) { build(:ci_pipeline) }
+
+ subject { service.merge_when_pipeline_succeeds(pipeline.sha) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'merge' }
+ end
+
+ it "posts the 'merge when pipeline succeeds' system note" do
+ expect(subject.note).to match(%r{enabled an automatic merge when the pipeline for (\w+/\w+@)?\h{40} succeeds})
+ end
+ end
+
+ describe '.cancel_merge_when_pipeline_succeeds' do
+ subject { service.cancel_merge_when_pipeline_succeeds }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'merge' }
+ end
+
+ it "posts the 'merge when pipeline succeeds' system note" do
+ expect(subject.note).to eq "canceled the automatic merge"
+ end
+ end
+
+ describe '.abort_merge_when_pipeline_succeeds' do
+ subject { service.abort_merge_when_pipeline_succeeds('merge request was closed') }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'merge' }
+ end
+
+ it "posts the 'merge when pipeline succeeds' system note" do
+ expect(subject.note).to eq "aborted the automatic merge because merge request was closed"
+ end
+ end
+
+ describe '.handle_merge_request_wip' do
+ context 'adding wip note' do
+ let(:noteable) { create(:merge_request, source_project: project, title: 'WIP Lorem ipsum') }
+
+ subject { service.handle_merge_request_wip }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'title' }
+ end
+
+ it 'sets the note text' do
+ expect(subject.note).to eq 'marked as a **Work In Progress**'
+ end
+ end
+
+ context 'removing wip note' do
+ subject { service.handle_merge_request_wip }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'title' }
+ end
+
+ it 'sets the note text' do
+ expect(subject.note).to eq 'unmarked as a **Work In Progress**'
+ end
+ end
+ end
+
+ describe '.add_merge_request_wip_from_commit' do
+ subject { service.add_merge_request_wip_from_commit(noteable.diff_head_commit) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'title' }
+ end
+
+ it "posts the 'marked as a Work In Progress from commit' system note" do
+ expect(subject.note).to match(
+ /marked as a \*\*Work In Progress\*\* from #{Commit.reference_pattern}/
+ )
+ end
+ end
+
+ describe '.resolve_all_discussions' do
+ subject { service.resolve_all_discussions }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'discussion' }
+ end
+
+ it 'sets the note text' do
+ expect(subject.note).to eq 'resolved all threads'
+ end
+ end
+
+ describe '.diff_discussion_outdated' do
+ let(:discussion) { create(:diff_note_on_merge_request, project: project).to_discussion }
+ let(:merge_request) { discussion.noteable }
+ let(:change_position) { discussion.position }
+
+ def reloaded_merge_request
+ MergeRequest.find(merge_request.id)
+ end
+
+ let(:service) { described_class.new(project: project, author: author) }
+
+ subject { service.diff_discussion_outdated(discussion, change_position) }
+
+ it_behaves_like 'a system note' do
+ let(:expected_noteable) { discussion.first_note.noteable }
+ let(:action) { 'outdated' }
+ end
+
+ context 'when the change_position is valid for the discussion' do
+ it 'creates a new note in the discussion' do
+ # we need to completely rebuild the merge request object, or the `@discussions` on the merge request are not reloaded.
+ expect { subject }.to change { reloaded_merge_request.discussions.first.notes.size }.by(1)
+ end
+
+ it 'links to the diff in the system note' do
+ diff_id = merge_request.merge_request_diff.id
+ line_code = change_position.line_code(project.repository)
+ link = diffs_project_merge_request_path(project, merge_request, diff_id: diff_id, anchor: line_code)
+
+ expect(subject.note).to eq("changed this line in [version 1 of the diff](#{link})")
+ end
+
+ context 'discussion is on an image' do
+ let(:discussion) { create(:image_diff_note_on_merge_request, project: project).to_discussion }
+
+ it 'links to the diff in the system note' do
+ diff_id = merge_request.merge_request_diff.id
+ file_hash = change_position.file_hash
+ link = diffs_project_merge_request_path(project, merge_request, diff_id: diff_id, anchor: file_hash)
+
+ expect(subject.note).to eq("changed this file in [version 1 of the diff](#{link})")
+ end
+ end
+ end
+
+ context 'when the change_position does not point to a valid version' do
+ before do
+ allow(merge_request).to receive(:version_params_for).and_return(nil)
+ end
+
+ it 'creates a new note in the discussion' do
+ # we need to completely rebuild the merge request object, or the `@discussions` on the merge request are not reloaded.
+ expect { subject }.to change { reloaded_merge_request.discussions.first.notes.size }.by(1)
+ end
+
+ it 'does not create a link' do
+ expect(subject.note).to eq('changed this line in version 1 of the diff')
+ end
+ end
+ end
+
+ describe '.change_branch' do
+ subject { service.change_branch('target', old_branch, new_branch) }
+
+ let(:old_branch) { 'old_branch'}
+ let(:new_branch) { 'new_branch'}
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'branch' }
+ end
+
+ context 'when target branch name changed' do
+ it 'sets the note text' do
+ expect(subject.note).to eq "changed target branch from `#{old_branch}` to `#{new_branch}`"
+ end
+ end
+ end
+
+ describe '.change_branch_presence' do
+ subject { service.change_branch_presence(:source, 'feature', :delete) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'branch' }
+ end
+
+ context 'when source branch deleted' do
+ it 'sets the note text' do
+ expect(subject.note).to eq "deleted source branch `feature`"
+ end
+ end
+ end
+
+ describe '.new_issue_branch' do
+ let(:branch) { '1-mepmep' }
+
+ subject { service.new_issue_branch(branch, branch_project: branch_project) }
+
+ shared_examples_for 'a system note for new issue branch' do
+ it_behaves_like 'a system note' do
+ let(:action) { 'branch' }
+ end
+
+ context 'when a branch is created from the new branch button' do
+ it 'sets the note text' do
+ expect(subject.note).to start_with("created branch [`#{branch}`]")
+ end
+ end
+ end
+
+ context 'branch_project is set' do
+ let(:branch_project) { create(:project, :repository) }
+
+ it_behaves_like 'a system note for new issue branch'
+ end
+
+ context 'branch_project is not set' do
+ let(:branch_project) { nil }
+
+ it_behaves_like 'a system note for new issue branch'
+ end
+ end
+
+ describe '.new_merge_request' do
+ subject { service.new_merge_request(merge_request) }
+
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: generate(:branch), target_project: project) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'merge' }
+ end
+
+ it 'sets the new merge request note text' do
+ expect(subject.note).to eq("created merge request #{merge_request.to_reference(project)} to address this issue")
+ end
+ end
+end
diff --git a/spec/services/users/signup_service_spec.rb b/spec/services/users/signup_service_spec.rb
new file mode 100644
index 00000000000..7d3cd614142
--- /dev/null
+++ b/spec/services/users/signup_service_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Users::SignupService do
+ let(:user) { create(:user, setup_for_company: true) }
+
+ describe '#execute' do
+ context 'when updating name' do
+ it 'updates the name attribute' do
+ result = update_user(user, name: 'New Name')
+
+ expect(result).to eq(status: :success)
+ expect(user.reload.name).to eq('New Name')
+ end
+
+ it 'returns an error result when name is missing' do
+ result = update_user(user, name: '')
+
+ expect(user.reload.name).not_to be_blank
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to include("Name can't be blank")
+ end
+ end
+
+ context 'when updating role' do
+ it 'updates the role attribute' do
+ result = update_user(user, role: 'development_team_lead')
+
+ expect(result).to eq(status: :success)
+ expect(user.reload.role).to eq('development_team_lead')
+ end
+
+ it 'returns an error result when role is missing' do
+ result = update_user(user, role: '')
+
+ expect(user.reload.role).not_to be_blank
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq("Role can't be blank")
+ end
+ end
+
+ context 'when updating setup_for_company' do
+ it 'updates the setup_for_company attribute' do
+ result = update_user(user, setup_for_company: 'false')
+
+ expect(result).to eq(status: :success)
+ expect(user.reload.setup_for_company).to be(false)
+ end
+
+ it 'returns an error result when setup_for_company is missing' do
+ result = update_user(user, setup_for_company: '')
+
+ expect(user.reload.setup_for_company).not_to be_blank
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq("Setup for company can't be blank")
+ end
+ end
+
+ def update_user(user, opts)
+ described_class.new(user, opts).execute
+ end
+ end
+end
diff --git a/spec/services/zoom_notes_service_spec.rb b/spec/services/zoom_notes_service_spec.rb
deleted file mode 100644
index 419ecf3f374..00000000000
--- a/spec/services/zoom_notes_service_spec.rb
+++ /dev/null
@@ -1,81 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe ZoomNotesService do
- describe '#execute' do
- let(:issue) { OpenStruct.new(description: description) }
- let(:project) { Object.new }
- let(:user) { Object.new }
- let(:description) { 'an issue description' }
- let(:old_description) { nil }
-
- subject { described_class.new(issue, project, user, old_description: old_description) }
-
- shared_examples 'no notifications' do
- it "doesn't create notifications" do
- expect(SystemNoteService).not_to receive(:zoom_link_added)
- expect(SystemNoteService).not_to receive(:zoom_link_removed)
-
- subject.execute
- end
- end
-
- it_behaves_like 'no notifications'
-
- context 'when the zoom link exists in both description and old_description' do
- let(:description) { 'a changed issue description https://zoom.us/j/123' }
- let(:old_description) { 'an issue description https://zoom.us/j/123' }
-
- it_behaves_like 'no notifications'
- end
-
- context "when the zoom link doesn't exist in both description and old_description" do
- let(:description) { 'a changed issue description' }
- let(:old_description) { 'an issue description' }
-
- it_behaves_like 'no notifications'
- end
-
- context 'when description == old_description' do
- let(:old_description) { 'an issue description' }
-
- it_behaves_like 'no notifications'
- end
-
- context 'when the description contains a zoom link and old_description is nil' do
- let(:description) { 'a changed issue description https://zoom.us/j/123' }
-
- it 'creates a zoom_link_added notification' do
- expect(SystemNoteService).to receive(:zoom_link_added).with(issue, project, user)
- expect(SystemNoteService).not_to receive(:zoom_link_removed)
-
- subject.execute
- end
- end
-
- context 'when the zoom link has been added to the description' do
- let(:description) { 'a changed issue description https://zoom.us/j/123' }
- let(:old_description) { 'an issue description' }
-
- it 'creates a zoom_link_added notification' do
- expect(SystemNoteService).to receive(:zoom_link_added).with(issue, project, user)
- expect(SystemNoteService).not_to receive(:zoom_link_removed)
-
- subject.execute
- end
- end
-
- context 'when the zoom link has been removed from the description' do
- let(:description) { 'a changed issue description' }
- let(:old_description) { 'an issue description https://zoom.us/j/123' }
-
- it 'creates a zoom_link_removed notification' do
- expect(SystemNoteService).not_to receive(:zoom_link_added).with(issue, project, user)
- expect(SystemNoteService).to receive(:zoom_link_removed)
-
- subject.execute
- end
- end
- end
-end
diff --git a/spec/sidekiq/cron/job_gem_dependency_spec.rb b/spec/sidekiq/cron/job_gem_dependency_spec.rb
index 2e7de75fd08..20347b4d306 100644
--- a/spec/sidekiq/cron/job_gem_dependency_spec.rb
+++ b/spec/sidekiq/cron/job_gem_dependency_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Sidekiq::Cron::Job do
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 7a5e570558e..d7533f99683 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -66,6 +66,11 @@ RSpec.configure do |config|
config.infer_spec_type_from_file_location!
config.full_backtrace = !!ENV['CI']
+ unless ENV['CI']
+ # Re-run failures locally with `--only-failures`
+ config.example_status_persistence_file_path = './spec/examples.txt'
+ end
+
config.define_derived_metadata(file_path: %r{(ee)?/spec/.+_spec\.rb\z}) do |metadata|
location = metadata[:location]
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 7b3b966bd50..2bd4750dffa 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -101,8 +101,12 @@ RSpec.configure do |config|
config.after(:example, :js) do |example|
# when a test fails, display any messages in the browser's console
- if example.exception
+    # but don't add the message if the failure is a pending test that got
+    # fixed. If we raised `JSConsoleError` the fixed test would be marked as
+    # failed again.
+ if example.exception && !example.exception.is_a?(RSpec::Core::Pending::PendingExampleFixedError)
console = page.driver.browser.manage.logs.get(:browser)&.reject { |log| log.message =~ JS_CONSOLE_FILTER }
+
if console.present?
message = "Unexpected browser console output:\n" + console.map(&:message).join("\n")
raise JSConsoleError, message
diff --git a/spec/support/controllers/ldap_omniauth_callbacks_controller_shared_context.rb b/spec/support/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
index d636c1cf6cd..8a8a2f714bc 100644
--- a/spec/support/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
+++ b/spec/support/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
@@ -10,6 +10,8 @@ shared_context 'Ldap::OmniauthCallbacksController' do
let(:provider) { 'ldapmain' }
let(:valid_login?) { true }
let(:user) { create(:omniauth_user, extern_uid: uid, provider: provider) }
+ let(:ldap_setting_defaults) { { enabled: true, servers: ldap_server_config } }
+ let(:ldap_settings) { ldap_setting_defaults }
let(:ldap_server_config) do
{ main: ldap_config_defaults(:main) }
end
@@ -23,7 +25,7 @@ shared_context 'Ldap::OmniauthCallbacksController' do
end
before do
- stub_ldap_setting(enabled: true, servers: ldap_server_config)
+ stub_ldap_setting(ldap_settings)
described_class.define_providers!
Rails.application.reload_routes!
@@ -36,4 +38,8 @@ shared_context 'Ldap::OmniauthCallbacksController' do
after do
Rails.application.env_config['omniauth.auth'] = @original_env_config_omniauth_auth
end
+
+ after(:all) do
+ Rails.application.reload_routes!
+ end
end
diff --git a/spec/support/cycle_analytics_helpers/test_generation.rb b/spec/support/cycle_analytics_helpers/test_generation.rb
index c57abbd96c6..2096ec90c5b 100644
--- a/spec/support/cycle_analytics_helpers/test_generation.rb
+++ b/spec/support/cycle_analytics_helpers/test_generation.rb
@@ -29,7 +29,7 @@ module CycleAnalyticsHelpers
scenarios.each do |start_time_conditions, end_time_conditions|
context "start condition: #{start_time_conditions.map(&:first).to_sentence}" do
context "end condition: #{end_time_conditions.map(&:first).to_sentence}" do
- it "finds the median of available durations between the two conditions" do
+ it "finds the median of available durations between the two conditions", :sidekiq_might_not_need_inline do
time_differences = Array.new(5) do |index|
data = data_fn[self]
start_time = (index * 10).days.from_now
diff --git a/spec/support/generate-seed-repo-rb b/spec/support/generate-seed-repo-rb
index bee9d419376..b63ff7147ec 100755
--- a/spec/support/generate-seed-repo-rb
+++ b/spec/support/generate-seed-repo-rb
@@ -1,4 +1,5 @@
#!/usr/bin/env ruby
+# frozen_string_literal: true
#
# # generate-seed-repo-rb
#
@@ -15,9 +16,9 @@
require 'erb'
require 'tempfile'
-SOURCE = File.expand_path('gitlab-git-test.git', __dir__).freeze
-SCRIPT_NAME = 'generate-seed-repo-rb'.freeze
-REPO_NAME = 'gitlab-git-test.git'.freeze
+SOURCE = File.expand_path('gitlab-git-test.git', __dir__)
+SCRIPT_NAME = 'generate-seed-repo-rb'
+REPO_NAME = 'gitlab-git-test.git'
def main
Dir.mktmpdir do |dir|
diff --git a/spec/support/helpers/access_matchers_helpers.rb b/spec/support/helpers/access_matchers_helpers.rb
new file mode 100644
index 00000000000..9100f245d36
--- /dev/null
+++ b/spec/support/helpers/access_matchers_helpers.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+module AccessMatchersHelpers
+ USER_ACCESSOR_METHOD_NAME = 'user'
+
+ def provide_user(role, membership = nil)
+ case role
+ when :admin
+ create(:admin)
+ when :auditor
+ create(:user, :auditor)
+ when :user
+ create(:user)
+ when :external
+ create(:user, :external)
+ when :visitor, :anonymous
+ nil
+ when User
+ role
+ when *Gitlab::Access.sym_options_with_owner.keys # owner, maintainer, developer, reporter, guest
+ raise ArgumentError, "cannot provide #{role} when membership reference is blank" unless membership
+
+ provide_user_by_membership(role, membership)
+ else
+ raise ArgumentError, "cannot provide user of an unknown role #{role}"
+ end
+ end
+
+ def provide_user_by_membership(role, membership)
+ if role == :owner && membership.owner
+ membership.owner
+ else
+ create(:user).tap do |user|
+ membership.public_send(:"add_#{role}", user)
+ end
+ end
+ end
+
+ def raise_if_non_block_expectation!(actual)
+ raise ArgumentError, 'This matcher supports block expectations only.' unless actual.is_a?(Proc)
+ end
+
+ def update_owner(objects, user)
+ return unless objects
+
+ objects.each do |object|
+ if object.respond_to?(:owner)
+ object.update_attribute(:owner, user)
+ elsif object.respond_to?(:user)
+ object.update_attribute(:user, user)
+ else
+ raise ArgumentError, "cannot own this object #{object}"
+ end
+ end
+ end
+
+ def patch_example_group(user)
+ return if user.nil? # for anonymous users
+
+ # This call is evaluated in context of ExampleGroup instance in which the matcher is called. Overrides the `user`
+ # (or defined by `method_name`) method generated by `let` definition in example group before it's used by `subject`.
+ # This override is per concrete example only because the example group class gets re-created for each example.
+ instance_eval(<<~CODE, __FILE__, __LINE__ + 1)
+ if instance_variable_get(:@__#{USER_ACCESSOR_METHOD_NAME}_patched)
+ raise ArgumentError, 'An access matcher be_allowed_for/be_denied_for can be used only once per example (`it` block)'
+ end
+ instance_variable_set(:@__#{USER_ACCESSOR_METHOD_NAME}_patched, true)
+
+ def #{USER_ACCESSOR_METHOD_NAME}
+ @#{USER_ACCESSOR_METHOD_NAME} ||= User.find(#{user.id})
+ end
+ CODE
+ end
+
+ def prepare_matcher_environment(role, membership, owned_objects)
+ user = provide_user(role, membership)
+
+ if user
+ update_owner(owned_objects, user)
+ patch_example_group(user)
+ end
+ end
+
+ def run_matcher(action, role, membership, owned_objects)
+ raise_if_non_block_expectation!(action)
+
+ prepare_matcher_environment(role, membership, owned_objects)
+
+ if block_given?
+ yield action
+ else
+ action.call
+ end
+ end
+end
diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb
index a604359942f..d101b092e7d 100644
--- a/spec/support/helpers/cycle_analytics_helpers.rb
+++ b/spec/support/helpers/cycle_analytics_helpers.rb
@@ -77,7 +77,7 @@ module CycleAnalyticsHelpers
.new(project, user)
.closed_by_merge_requests(issue)
- merge_requests.each { |merge_request| MergeRequests::MergeService.new(project, user).execute(merge_request) }
+ merge_requests.each { |merge_request| MergeRequests::MergeService.new(project, user, sha: merge_request.diff_head_sha).execute(merge_request) }
end
def deploy_master(user, project, environment: 'production')
diff --git a/spec/support/helpers/filtered_search_helpers.rb b/spec/support/helpers/filtered_search_helpers.rb
index 39c818b1763..5dc87c36931 100644
--- a/spec/support/helpers/filtered_search_helpers.rb
+++ b/spec/support/helpers/filtered_search_helpers.rb
@@ -114,6 +114,10 @@ module FilteredSearchHelpers
create_token('Milestone', milestone_name, symbol)
end
+ def release_token(release_tag = nil)
+ create_token('Release', release_tag)
+ end
+
def label_token(label_name = nil, has_symbol = true)
symbol = has_symbol ? '~' : nil
create_token('Label', label_name, symbol)
diff --git a/spec/support/helpers/grafana_api_helpers.rb b/spec/support/helpers/grafana_api_helpers.rb
new file mode 100644
index 00000000000..e47b1a808f2
--- /dev/null
+++ b/spec/support/helpers/grafana_api_helpers.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module GrafanaApiHelpers
+ def valid_grafana_dashboard_link(base_url)
+ base_url +
+ '/d/XDaNK6amz/gitlab-omnibus-redis' \
+ '?from=1570397739557&to=1570484139557' \
+ '&var-instance=localhost:9121&panelId=8'
+ end
+
+ def stub_dashboard_request(base_url, path: '/api/dashboards/uid/XDaNK6amz', body: nil)
+ body ||= fixture_file('grafana/dashboard_response.json')
+
+ stub_request(:get, "#{base_url}#{path}")
+ .to_return(
+ status: 200,
+ body: body,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+
+ def stub_datasource_request(base_url, path: '/api/datasources/name/GitLab%20Omnibus', body: nil)
+ body ||= fixture_file('grafana/datasource_response.json')
+
+ stub_request(:get, "#{base_url}#{path}")
+ .to_return(
+ status: 200,
+ body: body,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+
+ def stub_all_grafana_proxy_requests(base_url)
+ stub_request(:any, /#{base_url}\/api\/datasources\/proxy/)
+ .to_return(
+ status: 200,
+ body: fixture_file('grafana/proxy_response.json'),
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 6fb1d279456..80a3f7df05f 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -37,9 +37,12 @@ module GraphqlHelpers
# BatchLoader::GraphQL returns a wrapper, so we need to :sync in order
# to get the actual values
def batch_sync(max_queries: nil, &blk)
- result = batch(max_queries: nil, &blk)
+ wrapper = proc do
+ lazy_vals = yield
+ lazy_vals.is_a?(Array) ? lazy_vals.map(&:sync) : lazy_vals&.sync
+ end
- result.is_a?(Array) ? result.map(&:sync) : result&.sync
+ batch(max_queries: max_queries, &wrapper)
end
def graphql_query_for(name, attributes = {}, fields = nil)
@@ -157,7 +160,13 @@ module GraphqlHelpers
def attributes_to_graphql(attributes)
attributes.map do |name, value|
- "#{GraphqlHelpers.fieldnamerize(name.to_s)}: \"#{value}\""
+ value_str = if value.is_a?(Array)
+ '["' + value.join('","') + '"]'
+ else
+ "\"#{value}\""
+ end
+
+ "#{GraphqlHelpers.fieldnamerize(name.to_s)}: #{value_str}"
end.join(", ")
end
@@ -282,6 +291,12 @@ module GraphqlHelpers
def allow_high_graphql_recursion
allow_any_instance_of(Gitlab::Graphql::QueryAnalyzers::RecursionAnalyzer).to receive(:recursion_threshold).and_return 1000
end
+
+ def node_array(data, extract_attribute = nil)
+ data.map do |item|
+ extract_attribute ? item['node'][extract_attribute] : item['node']
+ end
+ end
end
# This warms our schema, doing this as part of loading the helpers to avoid
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index e74dbca4f93..677aef57661 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -16,7 +16,7 @@ module KubernetesHelpers
end
def kube_logs_response
- kube_response(kube_logs_body)
+ { body: kube_logs_body }
end
def kube_deployments_response
@@ -319,10 +319,10 @@ module KubernetesHelpers
}
end
- def kube_knative_services_body(legacy_knative: false, **options)
+ def kube_knative_services_body(**options)
{
"kind" => "List",
- "items" => [legacy_knative ? knative_05_service(options) : kube_service(options)]
+ "items" => [knative_07_service(options)]
}
end
@@ -398,77 +398,171 @@ module KubernetesHelpers
}
end
- def kube_service(name: "kubetest", namespace: "default", domain: "example.com")
- {
- "metadata" => {
- "creationTimestamp" => "2018-11-21T06:16:33Z",
- "name" => name,
- "namespace" => namespace,
- "selfLink" => "/apis/serving.knative.dev/v1alpha1/namespaces/#{namespace}/services/#{name}"
- },
+ # noinspection RubyStringKeysInHashInspection
+ def knative_06_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production')
+ { "apiVersion" => "serving.knative.dev/v1alpha1",
+ "kind" => "Service",
+ "metadata" =>
+ { "annotations" =>
+ { "serving.knative.dev/creator" => "system:serviceaccount:#{namespace}:#{namespace}-service-account",
+ "serving.knative.dev/lastModifier" => "system:serviceaccount:#{namespace}:#{namespace}-service-account" },
+ "creationTimestamp" => "2019-10-22T21:19:20Z",
+ "generation" => 1,
+ "labels" => { "service" => name },
+ "name" => name,
+ "namespace" => namespace,
+ "resourceVersion" => "6042",
+ "selfLink" => "/apis/serving.knative.dev/v1alpha1/namespaces/#{namespace}/services/#{name}",
+ "uid" => "9c7f63d0-f511-11e9-8815-42010a80002f" },
"spec" => {
- "generation" => 2
+ "runLatest" => {
+ "configuration" => {
+ "revisionTemplate" => {
+ "metadata" => {
+ "annotations" => { "Description" => description },
+ "creationTimestamp" => "2019-10-22T21:19:20Z",
+ "labels" => { "service" => name }
+ },
+ "spec" => {
+ "container" => {
+ "env" => [{ "name" => "timestamp", "value" => "2019-10-22 21:19:20" }],
+ "image" => "image_name",
+ "name" => "",
+ "resources" => {}
+ },
+ "timeoutSeconds" => 300
+ }
+ }
+ }
+ }
},
"status" => {
- "url" => "http://#{name}.#{namespace}.#{domain}",
"address" => {
- "url" => "#{name}.#{namespace}.svc.cluster.local"
+ "hostname" => "#{name}.#{namespace}.svc.cluster.local",
+ "url" => "http://#{name}.#{namespace}.svc.cluster.local"
},
- "latestCreatedRevisionName" => "#{name}-00002",
- "latestReadyRevisionName" => "#{name}-00002",
- "observedGeneration" => 2
- }
- }
- end
-
- def knative_05_service(name: "kubetest", namespace: "default", domain: "example.com")
- {
- "metadata" => {
- "creationTimestamp" => "2018-11-21T06:16:33Z",
- "name" => name,
- "namespace" => namespace,
- "selfLink" => "/apis/serving.knative.dev/v1alpha1/namespaces/#{namespace}/services/#{name}"
- },
- "spec" => {
- "generation" => 2
- },
- "status" => {
+ "conditions" =>
+ [{ "lastTransitionTime" => "2019-10-22T21:20:25Z", "status" => "True", "type" => "ConfigurationsReady" },
+ { "lastTransitionTime" => "2019-10-22T21:20:25Z", "status" => "True", "type" => "Ready" },
+ { "lastTransitionTime" => "2019-10-22T21:20:25Z", "status" => "True", "type" => "RoutesReady" }],
"domain" => "#{name}.#{namespace}.#{domain}",
"domainInternal" => "#{name}.#{namespace}.svc.cluster.local",
- "latestCreatedRevisionName" => "#{name}-00002",
- "latestReadyRevisionName" => "#{name}-00002",
- "observedGeneration" => 2
- }
- }
- end
-
- def kube_service_full(name: "kubetest", namespace: "kube-ns", domain: "example.com")
- {
- "metadata" => {
- "creationTimestamp" => "2018-11-21T06:16:33Z",
- "name" => name,
- "namespace" => namespace,
- "selfLink" => "/apis/serving.knative.dev/v1alpha1/namespaces/#{namespace}/services/#{name}",
- "annotation" => {
- "description" => "This is a test description"
- }
+ "latestCreatedRevisionName" => "#{name}-bskx6",
+ "latestReadyRevisionName" => "#{name}-bskx6",
+ "observedGeneration" => 1,
+ "traffic" => [{ "latestRevision" => true, "percent" => 100, "revisionName" => "#{name}-bskx6" }],
+ "url" => "http://#{name}.#{namespace}.#{domain}"
},
+ "environment_scope" => environment,
+ "cluster_id" => 9,
+ "podcount" => 0 }
+ end
+
+ # noinspection RubyStringKeysInHashInspection
+ def knative_07_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production')
+ { "apiVersion" => "serving.knative.dev/v1alpha1",
+ "kind" => "Service",
+ "metadata" =>
+ { "annotations" =>
+ { "serving.knative.dev/creator" => "system:serviceaccount:#{namespace}:#{namespace}-service-account",
+ "serving.knative.dev/lastModifier" => "system:serviceaccount:#{namespace}:#{namespace}-service-account" },
+ "creationTimestamp" => "2019-10-22T21:19:13Z",
+ "generation" => 1,
+ "labels" => { "service" => name },
+ "name" => name,
+ "namespace" => namespace,
+ "resourceVersion" => "289726",
+ "selfLink" => "/apis/serving.knative.dev/v1alpha1/namespaces/#{namespace}/services/#{name}",
+ "uid" => "988349fa-f511-11e9-9ea1-42010a80005e" },
"spec" => {
- "generation" => 2,
- "build" => {
- "template" => "go-1.10.3"
+ "template" => {
+ "metadata" => {
+ "annotations" => { "Description" => description },
+ "creationTimestamp" => "2019-10-22T21:19:12Z",
+ "labels" => { "service" => name }
+ },
+ "spec" => {
+ "containers" => [{
+ "env" =>
+ [{ "name" => "timestamp", "value" => "2019-10-22 21:19:12" }],
+ "image" => "image_name",
+ "name" => "user-container",
+ "resources" => {}
+ }],
+ "timeoutSeconds" => 300
+ }
+ },
+ "traffic" => [{ "latestRevision" => true, "percent" => 100 }]
+ },
+ "status" =>
+ { "address" => { "url" => "http://#{name}.#{namespace}.svc.cluster.local" },
+ "conditions" =>
+ [{ "lastTransitionTime" => "2019-10-22T21:20:15Z", "status" => "True", "type" => "ConfigurationsReady" },
+ { "lastTransitionTime" => "2019-10-22T21:20:15Z", "status" => "True", "type" => "Ready" },
+ { "lastTransitionTime" => "2019-10-22T21:20:15Z", "status" => "True", "type" => "RoutesReady" }],
+ "latestCreatedRevisionName" => "#{name}-92tsj",
+ "latestReadyRevisionName" => "#{name}-92tsj",
+ "observedGeneration" => 1,
+ "traffic" => [{ "latestRevision" => true, "percent" => 100, "revisionName" => "#{name}-92tsj" }],
+ "url" => "http://#{name}.#{namespace}.#{domain}" },
+ "environment_scope" => environment,
+ "cluster_id" => 5,
+ "podcount" => 0 }
+ end
+
+ # noinspection RubyStringKeysInHashInspection
+ def knative_05_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production')
+ { "apiVersion" => "serving.knative.dev/v1alpha1",
+ "kind" => "Service",
+ "metadata" =>
+ { "annotations" =>
+ { "serving.knative.dev/creator" => "system:serviceaccount:#{namespace}:#{namespace}-service-account",
+ "serving.knative.dev/lastModifier" => "system:serviceaccount:#{namespace}:#{namespace}-service-account" },
+ "creationTimestamp" => "2019-10-22T21:19:19Z",
+ "generation" => 1,
+ "labels" => { "service" => name },
+ "name" => name,
+ "namespace" => namespace,
+ "resourceVersion" => "330390",
+ "selfLink" => "/apis/serving.knative.dev/v1alpha1/namespaces/#{namespace}/services/#{name}",
+ "uid" => "9c710da6-f511-11e9-9ba0-42010a800161" },
+ "spec" => {
+ "runLatest" => {
+ "configuration" => {
+ "revisionTemplate" => {
+ "metadata" => {
+ "annotations" => { "Description" => description },
+ "creationTimestamp" => "2019-10-22T21:19:19Z",
+ "labels" => { "service" => name }
+ },
+ "spec" => {
+ "container" => {
+ "env" => [{ "name" => "timestamp", "value" => "2019-10-22 21:19:19" }],
+ "image" => "image_name",
+ "name" => "",
+ "resources" => { "requests" => { "cpu" => "400m" } }
+ },
+ "timeoutSeconds" => 300
+ }
+ }
+ }
}
},
- "status" => {
- "url" => "http://#{name}.#{namespace}.#{domain}",
- "address" => {
- "url" => "#{name}.#{namespace}.svc.cluster.local"
- },
- "latestCreatedRevisionName" => "#{name}-00002",
- "latestReadyRevisionName" => "#{name}-00002",
- "observedGeneration" => 2
- }
- }
+ "status" =>
+ { "address" => { "hostname" => "#{name}.#{namespace}.svc.cluster.local" },
+ "conditions" =>
+ [{ "lastTransitionTime" => "2019-10-22T21:20:24Z", "status" => "True", "type" => "ConfigurationsReady" },
+ { "lastTransitionTime" => "2019-10-22T21:20:24Z", "status" => "True", "type" => "Ready" },
+ { "lastTransitionTime" => "2019-10-22T21:20:24Z", "status" => "True", "type" => "RoutesReady" }],
+ "domain" => "#{name}.#{namespace}.#{domain}",
+ "domainInternal" => "#{name}.#{namespace}.svc.cluster.local",
+ "latestCreatedRevisionName" => "#{name}-58qgr",
+ "latestReadyRevisionName" => "#{name}-58qgr",
+ "observedGeneration" => 1,
+ "traffic" => [{ "percent" => 100, "revisionName" => "#{name}-58qgr" }] },
+ "environment_scope" => environment,
+ "cluster_id" => 8,
+ "podcount" => 0 }
end
def kube_terminals(service, pod)
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 7d5896e4eeb..1d42f26ad3e 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -53,7 +53,7 @@ module LoginHelpers
fill_in 'password', with: user.password
- click_button 'Enter admin mode'
+ click_button 'Enter Admin Mode'
end
def gitlab_sign_in_via(provider, user, uid, saml_response = nil)
diff --git a/spec/support/helpers/smime_helper.rb b/spec/support/helpers/smime_helper.rb
index 656b3e196ba..3ad19cd3da0 100644
--- a/spec/support/helpers/smime_helper.rb
+++ b/spec/support/helpers/smime_helper.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module SmimeHelper
include OpenSSL
diff --git a/spec/support/helpers/stub_experiments.rb b/spec/support/helpers/stub_experiments.rb
index ed868e22c6e..7a5a188ab4d 100644
--- a/spec/support/helpers/stub_experiments.rb
+++ b/spec/support/helpers/stub_experiments.rb
@@ -9,7 +9,19 @@ module StubExperiments
# - `stub_experiment(signup_flow: false)` ... Disable `signup_flow` experiment globally.
def stub_experiment(experiments)
experiments.each do |experiment_key, enabled|
- allow(Gitlab::Experimentation).to receive(:enabled?).with(experiment_key, any_args) { enabled }
+ allow(Gitlab::Experimentation).to receive(:enabled?).with(experiment_key) { enabled }
+ end
+ end
+
+ # Stub Experiment for user with `key: true/false`
+ #
+ # @param [Hash] experiment where key is feature name and value is boolean whether enabled or not.
+ #
+ # Examples
+ # - `stub_experiment_for_user(signup_flow: false)` ... Disable `signup_flow` experiment for user.
+ def stub_experiment_for_user(experiments)
+ experiments.each do |experiment_key, enabled|
+ allow(Gitlab::Experimentation).to receive(:enabled_for_user?).with(experiment_key, anything) { enabled }
end
end
end
diff --git a/spec/support/helpers/stub_gitlab_calls.rb b/spec/support/helpers/stub_gitlab_calls.rb
index e3dde888277..fe343da7838 100644
--- a/spec/support/helpers/stub_gitlab_calls.rb
+++ b/spec/support/helpers/stub_gitlab_calls.rb
@@ -18,8 +18,13 @@ module StubGitlabCalls
stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
end
- def stub_ci_pipeline_yaml_file(ci_yaml)
- allow_any_instance_of(Ci::Pipeline).to receive(:ci_yaml_file) { ci_yaml }
+ def stub_ci_pipeline_yaml_file(ci_yaml_content)
+ allow_any_instance_of(Repository).to receive(:gitlab_ci_yml_for).and_return(ci_yaml_content)
+
+ # Ensure we don't hit auto-devops when config not found in repository
+ unless ci_yaml_content
+ allow_any_instance_of(Project).to receive(:auto_devops_enabled?).and_return(false)
+ end
end
def stub_pipeline_modified_paths(pipeline, modified_paths)
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index a409dd2ef26..6a23875f103 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -148,8 +148,6 @@ module TestEnv
end
def setup_gitaly
- socket_path = Gitlab::GitalyClient.address('default').sub(/\Aunix:/, '')
- gitaly_dir = File.dirname(socket_path)
install_gitaly_args = [gitaly_dir, repos_path, gitaly_url].compact.join(',')
component_timed_setup('Gitaly',
@@ -162,8 +160,16 @@ module TestEnv
end
end
+ def gitaly_socket_path
+ Gitlab::GitalyClient.address('default').sub(/\Aunix:/, '')
+ end
+
+ def gitaly_dir
+ File.dirname(gitaly_socket_path)
+ end
+
def start_gitaly(gitaly_dir)
- if ENV['CI'].present?
+ if ci?
# Gitaly has been spawned outside this process already
return
end
@@ -172,8 +178,13 @@ module TestEnv
spawn_script = Rails.root.join('scripts/gitaly-test-spawn').to_s
Bundler.with_original_env do
- raise "gitaly spawn failed" unless system(spawn_script)
+ unless system(spawn_script)
+ message = 'gitaly spawn failed'
+ message += " (try `rm -rf #{gitaly_dir}` ?)" unless ci?
+ raise message
+ end
end
+
@gitaly_pid = Integer(File.read('tmp/tests/gitaly.pid'))
Kernel.at_exit { stop_gitaly }
@@ -243,6 +254,22 @@ module TestEnv
FileUtils.chmod_R 0755, target_repo_path
end
+ def rm_storage_dir(storage, dir)
+ Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ repos_path = Gitlab.config.repositories.storages[storage].legacy_disk_path
+ target_repo_refs_path = File.join(repos_path, dir)
+ FileUtils.remove_dir(target_repo_refs_path)
+ end
+ rescue Errno::ENOENT
+ end
+
+ def storage_dir_exists?(storage, dir)
+ Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ repos_path = Gitlab.config.repositories.storages[storage].legacy_disk_path
+ File.exist?(File.join(repos_path, dir))
+ end
+ end
+
def create_bare_repository(path)
FileUtils.mkdir_p(path)
@@ -370,7 +397,7 @@ module TestEnv
ensure_component_dir_name_is_correct!(component, install_dir)
# On CI, once installed, components never need update
- return if File.exist?(install_dir) && ENV['CI']
+ return if File.exist?(install_dir) && ci?
if component_needs_update?(install_dir, version)
# Cleanup the component entirely to ensure we start fresh
@@ -391,6 +418,10 @@ module TestEnv
puts " #{component} set up in #{Time.now - start} seconds...\n"
end
+ def ci?
+ ENV['CI'].present?
+ end
+
def ensure_component_dir_name_is_correct!(component, path)
actual_component_dir_name = File.basename(path)
expected_component_dir_name = component.parameterize
diff --git a/spec/support/import_export/common_util.rb b/spec/support/import_export/common_util.rb
index ac6840dbcfc..4e149c9fa54 100644
--- a/spec/support/import_export/common_util.rb
+++ b/spec/support/import_export/common_util.rb
@@ -8,5 +8,12 @@ module ImportExport
File.open("#{tmpdir}/test", 'w') { |file| file.write("test") }
FileUtils.ln_s("#{tmpdir}/test", "#{tmpdir}/#{symlink_name}")
end
+
+ def setup_import_export_config(name, prefix = nil)
+ export_path = [prefix, 'spec', 'fixtures', 'lib', 'gitlab', 'import_export', name].compact
+ export_path = File.join(*export_path)
+
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:export_path) { export_path }
+ end
end
end
diff --git a/spec/support/matchers/access_matchers_for_request.rb b/spec/support/matchers/access_matchers_for_request.rb
new file mode 100644
index 00000000000..9b80bf8562c
--- /dev/null
+++ b/spec/support/matchers/access_matchers_for_request.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+# AccessMatchersForRequest
+#
+# Matchers to test the access permissions for requests specs (most useful for API tests).
+module AccessMatchersForRequest
+ extend RSpec::Matchers::DSL
+ include AccessMatchersHelpers
+
+ EXPECTED_STATUS_CODES_ALLOWED = [200, 201, 204, 302, 304].freeze
+ EXPECTED_STATUS_CODES_DENIED = [401, 403, 404].freeze
+
+ def description_for(role, type, expected, result)
+ "be #{type} for #{role} role. Expected status code: any of #{expected.join(', ')} Got: #{result}"
+ end
+
+ matcher :be_allowed_for do |role|
+ match do |action|
+ # methods called in this and negated block are being run in context of ExampleGroup
+ # (not matcher) instance so we have to pass data via local vars
+
+ run_matcher(action, role, @membership, @owned_objects)
+
+ EXPECTED_STATUS_CODES_ALLOWED.include?(response.status)
+ end
+
+ match_when_negated do |action|
+ run_matcher(action, role, @membership, @owned_objects)
+
+ EXPECTED_STATUS_CODES_DENIED.include?(response.status)
+ end
+
+ chain :of do |membership|
+ @membership = membership
+ end
+
+ chain :own do |*owned_objects|
+ @owned_objects = owned_objects
+ end
+
+ failure_message do
+ "expected this action to #{description_for(role, 'allowed', EXPECTED_STATUS_CODES_ALLOWED, response.status)}"
+ end
+
+ failure_message_when_negated do
+ "expected this action to #{description_for(role, 'denied', EXPECTED_STATUS_CODES_DENIED, response.status)}"
+ end
+
+ supports_block_expectations
+ end
+
+ RSpec::Matchers.define_negated_matcher :be_denied_for, :be_allowed_for
+end
diff --git a/spec/support/matchers/access_matchers_generic.rb b/spec/support/matchers/access_matchers_generic.rb
new file mode 100644
index 00000000000..13955750f4f
--- /dev/null
+++ b/spec/support/matchers/access_matchers_generic.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+# AccessMatchersGeneric
+#
+# Matchers to test the access permissions for service classes or other generic pieces of business logic.
+module AccessMatchersGeneric
+ extend RSpec::Matchers::DSL
+ include AccessMatchersHelpers
+
+ ERROR_CLASS = Gitlab::Access::AccessDeniedError
+
+ def error_message(error)
+ str = error.class.name
+ str += ": #{error.message}" if error.message != error.class.name
+ str
+ end
+
+ def error_expectation_message(allowed, error)
+ if allowed
+ "Expected to raise nothing but #{error_message(error)} was raised."
+ else
+ "Expected to raise #{ERROR_CLASS} but nothing was raised."
+ end
+ end
+
+ def description_for(role, type, error)
+ allowed = type == 'allowed'
+ "be #{type} for #{role} role. #{error_expectation_message(allowed, error)}"
+ end
+
+ matcher :be_allowed_for do |role|
+ match do |action|
+ # methods called in this and negated block are being run in context of ExampleGroup
+ # (not matcher) instance so we have to pass data via local vars
+
+ run_matcher(action, role, @membership, @owned_objects) do |action|
+ action.call
+ rescue => e
+ @error = e
+ raise unless e.is_a?(ERROR_CLASS)
+ end
+
+ @error.nil?
+ end
+
+ chain :of do |membership|
+ @membership = membership
+ end
+
+ chain :own do |*owned_objects|
+ @owned_objects = owned_objects
+ end
+
+ failure_message do
+ "expected this action to #{description_for(role, 'allowed', @error)}"
+ end
+
+ failure_message_when_negated do
+ "expected this action to #{description_for(role, 'denied', @error)}"
+ end
+
+ supports_block_expectations
+ end
+
+ RSpec::Matchers.define_negated_matcher :be_denied_for, :be_allowed_for
+end
diff --git a/spec/support/matchers/db_schema_matchers.rb b/spec/support/matchers/db_schema_matchers.rb
new file mode 100644
index 00000000000..55843b7bb49
--- /dev/null
+++ b/spec/support/matchers/db_schema_matchers.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+EXPECTED_SMALLINT_LIMIT = 2
+
+RSpec::Matchers.define :use_smallint_for_enums do |enums|
+ match do |actual|
+ @failing_enums = enums.select do |enum|
+ enum_type = actual.type_for_attribute(enum)
+ actual_limit = enum_type.send(:subtype).limit
+ actual_limit != EXPECTED_SMALLINT_LIMIT
+ end
+ @failing_enums.empty?
+ end
+
+ failure_message do
+ <<~FAILURE_MESSAGE
+ Expected #{actual.name} enums: #{failing_enums.join(', ')} to use the smallint type.
+
+ The smallint type is 2 bytes which is more than sufficient for an enum.
+ Using the smallint type would help us save space in the database.
+ To fix this, please add `limit: 2` in the migration file, for example:
+
+ def change
+ add_column :ci_job_artifacts, :file_format, :integer, limit: 2
+ end
+ FAILURE_MESSAGE
+ end
+
+ def failing_enums
+ @failing_enums ||= []
+ end
+end
diff --git a/spec/support/prepare-gitlab-git-test-for-commit b/spec/support/prepare-gitlab-git-test-for-commit
index d08e3ba5481..77c7f309312 100755
--- a/spec/support/prepare-gitlab-git-test-for-commit
+++ b/spec/support/prepare-gitlab-git-test-for-commit
@@ -1,4 +1,5 @@
#!/usr/bin/env ruby
+# frozen_string_literal: true
abort unless [
system('spec/support/generate-seed-repo-rb', out: 'spec/support/helpers/seed_repo.rb'),
diff --git a/spec/support/shared_examples/ci/auto_merge_merge_requests_examples.rb b/spec/support/shared_examples/ci/auto_merge_merge_requests_examples.rb
new file mode 100644
index 00000000000..c11448ffe0f
--- /dev/null
+++ b/spec/support/shared_examples/ci/auto_merge_merge_requests_examples.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+shared_examples 'aborted merge requests for MWPS' do
+ let(:aborted_message) do
+ /aborted the automatic merge because target branch was updated/
+ end
+
+ it 'aborts auto_merge' do
+ expect(merge_request.auto_merge_enabled?).to be_falsey
+ expect(merge_request.notes.last.note).to match(aborted_message)
+ end
+
+ it 'removes merge_user' do
+ expect(merge_request.merge_user).to be_nil
+ end
+
+ it 'does not add todos for merge user' do
+ expect(user.todos.for_target(merge_request)).to be_empty
+ end
+
+ it 'adds todos for merge author' do
+ expect(author.todos.for_target(merge_request)).to be_present.and be_all(&:pending?)
+ end
+end
+
+shared_examples 'maintained merge requests for MWPS' do
+ it 'does not cancel auto merge' do
+ expect(merge_request.auto_merge_enabled?).to be_truthy
+ expect(merge_request.notes).to be_empty
+ end
+
+ it 'does not change merge_user' do
+ expect(merge_request.merge_user).to eq(user)
+ end
+
+ it 'does not add todos' do
+ expect(author.todos.for_target(merge_request)).to be_empty
+ expect(user.todos.for_target(merge_request)).to be_empty
+ end
+end
diff --git a/spec/support/shared_examples/container_repositories_shared_examples.rb b/spec/support/shared_examples/container_repositories_shared_examples.rb
index 946b130fca2..b4f45ba9a00 100644
--- a/spec/support/shared_examples/container_repositories_shared_examples.rb
+++ b/spec/support/shared_examples/container_repositories_shared_examples.rb
@@ -56,3 +56,11 @@ shared_examples 'returns repositories for allowed users' do |user_type, scope|
end
end
end
+
+shared_examples 'a gitlab tracking event' do |category, action|
+ it "creates a gitlab tracking event #{action}" do
+ expect(Gitlab::Tracking).to receive(:event).with(category, action, {})
+
+ subject
+ end
+end
diff --git a/spec/support/shared_examples/cycle_analytics_event_shared_examples.rb b/spec/support/shared_examples/cycle_analytics_event_shared_examples.rb
index dce1dbe1cd1..028b8da94a6 100644
--- a/spec/support/shared_examples/cycle_analytics_event_shared_examples.rb
+++ b/spec/support/shared_examples/cycle_analytics_event_shared_examples.rb
@@ -1,7 +1,8 @@
# frozen_string_literal: true
shared_examples_for 'cycle analytics event' do
- let(:instance) { described_class.new({}) }
+ let(:params) { {} }
+ let(:instance) { described_class.new(params) }
it { expect(described_class.name).to be_a_kind_of(String) }
it { expect(described_class.identifier).to be_a_kind_of(Symbol) }
diff --git a/spec/support/shared_examples/cycle_analytics_stage_shared_examples.rb b/spec/support/shared_examples/cycle_analytics_stage_shared_examples.rb
index afa035d039a..c781f72ff11 100644
--- a/spec/support/shared_examples/cycle_analytics_stage_shared_examples.rb
+++ b/spec/support/shared_examples/cycle_analytics_stage_shared_examples.rb
@@ -10,6 +10,11 @@ shared_examples_for 'cycle analytics stage' do
}
end
+ describe 'associations' do
+ it { is_expected.to belong_to(:end_event_label) }
+ it { is_expected.to belong_to(:start_event_label) }
+ end
+
describe 'validation' do
it 'is valid' do
expect(described_class.new(valid_params)).to be_valid
@@ -18,22 +23,22 @@ shared_examples_for 'cycle analytics stage' do
it 'validates presence of parent' do
stage = described_class.new(valid_params.except(:parent))
- expect(stage).not_to be_valid
- expect(stage.errors.details[parent_name]).to eq([{ error: :blank }])
+ expect(stage).to be_invalid
+ expect(stage.errors[parent_name]).to include("can't be blank")
end
it 'validates presence of start_event_identifier' do
stage = described_class.new(valid_params.except(:start_event_identifier))
- expect(stage).not_to be_valid
- expect(stage.errors.details[:start_event_identifier]).to eq([{ error: :blank }])
+ expect(stage).to be_invalid
+ expect(stage.errors[:start_event_identifier]).to include("can't be blank")
end
it 'validates presence of end_event_identifier' do
stage = described_class.new(valid_params.except(:end_event_identifier))
- expect(stage).not_to be_valid
- expect(stage.errors.details[:end_event_identifier]).to eq([{ error: :blank }])
+ expect(stage).to be_invalid
+ expect(stage.errors[:end_event_identifier]).to include("can't be blank")
end
it 'is invalid when end_event is not allowed for the given start_event' do
@@ -43,8 +48,8 @@ shared_examples_for 'cycle analytics stage' do
)
stage = described_class.new(invalid_params)
- expect(stage).not_to be_valid
- expect(stage.errors.details[:end_event]).to eq([{ error: :not_allowed_for_the_given_start_event }])
+ expect(stage).to be_invalid
+ expect(stage.errors[:end_event]).to include(s_('CycleAnalytics|not allowed for the given start event'))
end
context 'disallows default stage names when creating custom stage' do
@@ -105,3 +110,119 @@ shared_examples_for 'cycle analytics stage' do
end
end
end
+
+shared_examples_for 'cycle analytics label based stage' do
+ context 'when creating label based event' do
+ context 'when the label id is not passed' do
+ it 'returns validation error when `start_event_label_id` is missing' do
+ stage = described_class.new({
+ name: 'My Stage',
+ parent: parent,
+ start_event_identifier: :issue_label_added,
+ end_event_identifier: :issue_closed
+ })
+
+ expect(stage).to be_invalid
+ expect(stage.errors[:start_event_label]).to include("can't be blank")
+ end
+
+ it 'returns validation error when `end_event_label_id` is missing' do
+ stage = described_class.new({
+ name: 'My Stage',
+ parent: parent,
+ start_event_identifier: :issue_closed,
+ end_event_identifier: :issue_label_added
+ })
+
+ expect(stage).to be_invalid
+ expect(stage.errors[:end_event_label]).to include("can't be blank")
+ end
+ end
+
+ context 'when group label is defined on the root group' do
+ it 'succeeds' do
+ stage = described_class.new({
+ name: 'My Stage',
+ parent: parent,
+ start_event_identifier: :issue_label_added,
+ start_event_label: group_label,
+ end_event_identifier: :issue_closed
+ })
+
+ expect(stage).to be_valid
+ end
+ end
+
+ context 'when subgroup is given' do
+ it 'succeeds' do
+ stage = described_class.new({
+ name: 'My Stage',
+ parent: parent_in_subgroup,
+ start_event_identifier: :issue_label_added,
+ start_event_label: group_label,
+ end_event_identifier: :issue_closed
+ })
+
+ expect(stage).to be_valid
+ end
+ end
+
+ context 'when label is defined for a different group' do
+ let(:error_message) { s_('CycleAnalyticsStage|is not available for the selected group') }
+
+ it 'returns validation for `start_event_label`' do
+ stage = described_class.new({
+ name: 'My Stage',
+ parent: parent_outside_of_group_label_scope,
+ start_event_identifier: :issue_label_added,
+ start_event_label: group_label,
+ end_event_identifier: :issue_closed
+ })
+
+ expect(stage).to be_invalid
+ expect(stage.errors[:start_event_label]).to include(error_message)
+ end
+
+ it 'returns validation for `end_event_label`' do
+ stage = described_class.new({
+ name: 'My Stage',
+ parent: parent_outside_of_group_label_scope,
+ start_event_identifier: :issue_closed,
+ end_event_identifier: :issue_label_added,
+ end_event_label: group_label
+ })
+
+ expect(stage).to be_invalid
+ expect(stage.errors[:end_event_label]).to include(error_message)
+ end
+ end
+
+ context 'when `ProjectLabel` is given' do
+ let_it_be(:label) { create(:label) }
+
+ it 'raises error when `ProjectLabel` is given for `start_event_label`' do
+ params = {
+ name: 'My Stage',
+ parent: parent,
+ start_event_identifier: :issue_label_added,
+ start_event_label: label,
+ end_event_identifier: :issue_closed
+ }
+
+ expect { described_class.new(params) }.to raise_error(ActiveRecord::AssociationTypeMismatch)
+ end
+
+ it 'raises error when `ProjectLabel` is given for `end_event_label`' do
+ params = {
+ name: 'My Stage',
+ parent: parent,
+ start_event_identifier: :issue_closed,
+ end_event_identifier: :issue_label_added,
+ end_event_label: label
+ }
+
+ expect { described_class.new(params) }.to raise_error(ActiveRecord::AssociationTypeMismatch)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/archive_download_buttons_shared_examples.rb b/spec/support/shared_examples/features/archive_download_buttons_shared_examples.rb
index 920fcbde483..21c32c9c04a 100644
--- a/spec/support/shared_examples/features/archive_download_buttons_shared_examples.rb
+++ b/spec/support/shared_examples/features/archive_download_buttons_shared_examples.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
shared_examples 'archive download buttons' do
- let(:formats) { %w(zip tar.gz tar.bz2 tar) }
let(:path_to_visit) { project_path(project) }
let(:ref) { project.default_branch }
@@ -13,7 +12,7 @@ shared_examples 'archive download buttons' do
context 'private project' do
it 'shows archive download buttons with external storage URL prepended and user token appended to their href' do
- formats.each do |format|
+ Gitlab::Workhorse::ARCHIVE_FORMATS.each do |format|
path = archive_path(project, ref, format)
uri = URI('https://cdn.gitlab.com')
uri.path = path
@@ -28,7 +27,7 @@ shared_examples 'archive download buttons' do
let(:project) { create(:project, :repository, :public) }
it 'shows archive download buttons with external storage URL prepended to their href' do
- formats.each do |format|
+ Gitlab::Workhorse::ARCHIVE_FORMATS.each do |format|
path = archive_path(project, ref, format)
uri = URI('https://cdn.gitlab.com')
uri.path = path
@@ -45,7 +44,7 @@ shared_examples 'archive download buttons' do
end
it 'shows default archive download buttons' do
- formats.each do |format|
+ Gitlab::Workhorse::ARCHIVE_FORMATS.each do |format|
path = archive_path(project, ref, format)
expect(page).to have_link format, href: path
diff --git a/spec/support/shared_examples/file_finder.rb b/spec/support/shared_examples/file_finder.rb
index 984a06ccd1a..f4b28b94090 100644
--- a/spec/support/shared_examples/file_finder.rb
+++ b/spec/support/shared_examples/file_finder.rb
@@ -4,19 +4,19 @@ shared_examples 'file finder' do
let(:query) { 'files' }
let(:search_results) { subject.find(query) }
- it 'finds by name' do
- blob = search_results.find { |blob| blob.filename == expected_file_by_name }
+ it 'finds by path' do
+ blob = search_results.find { |blob| blob.path == expected_file_by_path }
- expect(blob.filename).to eq(expected_file_by_name)
+ expect(blob.path).to eq(expected_file_by_path)
expect(blob).to be_a(Gitlab::Search::FoundBlob)
expect(blob.ref).to eq(subject.ref)
expect(blob.data).not_to be_empty
end
it 'finds by content' do
- blob = search_results.find { |blob| blob.filename == expected_file_by_content }
+ blob = search_results.find { |blob| blob.path == expected_file_by_content }
- expect(blob.filename).to eq(expected_file_by_content)
+ expect(blob.path).to eq(expected_file_by_content)
expect(blob).to be_a(Gitlab::Search::FoundBlob)
expect(blob.ref).to eq(subject.ref)
expect(blob.data).not_to be_empty
diff --git a/spec/support/shared_examples/graphql/connection_paged_nodes.rb b/spec/support/shared_examples/graphql/connection_paged_nodes.rb
new file mode 100644
index 00000000000..830d2d2d4b1
--- /dev/null
+++ b/spec/support/shared_examples/graphql/connection_paged_nodes.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'connection with paged nodes' do
+ it 'returns the collection limited to max page size' do
+ expect(paged_nodes.size).to eq(3)
+ end
+
+ it 'is a loaded memoized array' do
+ expect(paged_nodes).to be_an(Array)
+ expect(paged_nodes.object_id).to eq(paged_nodes.object_id)
+ end
+
+ context 'when `first` is passed' do
+ let(:arguments) { { first: 2 } }
+
+ it 'returns only the first elements' do
+ expect(paged_nodes).to contain_exactly(all_nodes.first, all_nodes.second)
+ end
+ end
+
+ context 'when `last` is passed' do
+ let(:arguments) { { last: 2 } }
+
+ it 'returns only the last elements' do
+ expect(paged_nodes).to contain_exactly(all_nodes[3], all_nodes[4])
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/sort_enum_shared_examples.rb b/spec/support/shared_examples/graphql/sort_enum_shared_examples.rb
new file mode 100644
index 00000000000..becea9bcae1
--- /dev/null
+++ b/spec/support/shared_examples/graphql/sort_enum_shared_examples.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'common sort values' do
+ it 'exposes all the existing common sort values' do
+ expect(described_class.values.keys).to include(*%w[updated_desc updated_asc created_desc created_asc])
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/ci/config/entry/key_validations_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/config/entry/key_validations_shared_examples.rb
new file mode 100644
index 00000000000..b0b3e46332d
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/ci/config/entry/key_validations_shared_examples.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'key entry validations' do |config_name|
+ shared_examples 'key with slash' do
+ it 'is invalid' do
+ expect(entry).not_to be_valid
+ end
+
+ it 'reports errors with config value' do
+ expect(entry.errors).to include "#{config_name} config cannot contain the \"/\" character"
+ end
+ end
+
+ shared_examples 'key with only dots' do
+ it 'is invalid' do
+ expect(entry).not_to be_valid
+ end
+
+ it 'reports errors with config value' do
+ expect(entry.errors).to include "#{config_name} config cannot be \".\" or \"..\""
+ end
+ end
+
+ context 'when entry value contains slash' do
+ let(:config) { 'key/with/some/slashes' }
+
+ it_behaves_like 'key with slash'
+ end
+
+ context 'when entry value contains URI encoded slash (%2F)' do
+ let(:config) { 'key%2Fwith%2Fsome%2Fslashes' }
+
+ it_behaves_like 'key with slash'
+ end
+
+ context 'when entry value is a dot' do
+ let(:config) { '.' }
+
+ it_behaves_like 'key with only dots'
+ end
+
+ context 'when entry value is two dots' do
+ let(:config) { '..' }
+
+ it_behaves_like 'key with only dots'
+ end
+
+ context 'when entry value is a URI encoded dot (%2E)' do
+ let(:config) { '%2e' }
+
+ it_behaves_like 'key with only dots'
+ end
+
+ context 'when entry value is two URI encoded dots (%2E)' do
+ let(:config) { '%2E%2e' }
+
+ it_behaves_like 'key with only dots'
+ end
+
+ context 'when entry value is one dot and one URI encoded dot' do
+ let(:config) { '.%2e' }
+
+ it_behaves_like 'key with only dots'
+ end
+
+ context 'when key is a string' do
+ let(:config) { 'test' }
+
+ describe '#value' do
+ it 'returns key value' do
+ expect(entry.value).to eq 'test'
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb
new file mode 100644
index 00000000000..556d81133bc
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'with inheritable CI config' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:ignored_inheritable_columns) { [] }
+
+ it 'does prepend an Inheritable mixin' do
+ expect(described_class).to include_module(Gitlab::Config::Entry::Inheritable)
+ end
+
+ it 'all inheritable entries are covered' do
+ inheritable_entries = inheritable_class.nodes.keys
+ entries = described_class.nodes.keys
+
+ expect(entries + ignored_inheritable_columns).to include(
+ *inheritable_entries)
+ end
+
+ it 'all entries do have inherit flag' do
+ without_inherit_flag = described_class.nodes.map do |key, factory|
+ key if factory.inherit.nil?
+ end.compact
+
+ expect(without_inherit_flag).to be_empty
+ end
+
+ context 'for non-inheritable entries' do
+ where(:entry_key) do
+ described_class.nodes.map do |key, factory|
+ [key] unless factory.inherit
+ end.compact
+ end
+
+ with_them do
+ it 'inheritable_class does not define entry' do
+ expect(inheritable_class.nodes).not_to include(entry_key)
+ end
+ end
+ end
+
+ context 'for inheritable entries' do
+ where(:entry_key, :entry_class) do
+ described_class.nodes.map do |key, factory|
+ [key, factory.entry_class] if factory.inherit
+ end.compact
+ end
+
+ with_them do
+ let(:specified) { double('deps_specified', 'specified?' => true, value: 'specified') }
+ let(:unspecified) { double('unspecified', 'specified?' => false) }
+ let(:inheritable) { double(inheritable_key, '[]' => unspecified) }
+
+ let(:deps) do
+ if inheritable_key
+ double('deps', inheritable_key => inheritable, '[]' => unspecified)
+ else
+ inheritable
+ end
+ end
+
+ it 'inheritable_class does define entry' do
+ expect(inheritable_class.nodes).to include(entry_key)
+ expect(inheritable_class.nodes[entry_key].entry_class).to eq(entry_class)
+ end
+
+ context 'when is specified' do
+ it 'does inherit value' do
+ expect(inheritable).to receive('[]').with(entry_key).and_return(specified)
+
+ entry.compose!(deps)
+
+ expect(entry[entry_key]).to eq(specified)
+ end
+
+ context 'when entry is specified' do
+ let(:entry_specified) do
+ double('entry_specified', 'specified?' => true, value: 'specified', errors: [])
+ end
+
+ it 'does not inherit value' do
+ entry.send(:entries)[entry_key] = entry_specified
+
+ allow(inheritable).to receive('[]').with(entry_key).and_return(specified)
+
+ expect do
+ # we ignore exceptions as `#overwrite_entry`
+ # can raise exception on duplicates
+ entry.compose!(deps) rescue described_class::InheritError
+ end.not_to change { entry[entry_key] }
+ end
+ end
+ end
+
+ context 'when inheritable does not specify' do
+ it 'does not inherit value' do
+ entry.compose!(deps)
+
+ expect(entry[entry_key]).to be_a(
+ Gitlab::Config::Entry::Undefined)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/merge_requests_rendering_a_single_diff_version.rb b/spec/support/shared_examples/merge_requests_rendering_a_single_diff_version.rb
new file mode 100644
index 00000000000..80120629a32
--- /dev/null
+++ b/spec/support/shared_examples/merge_requests_rendering_a_single_diff_version.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+# This pending test can be removed when `single_mr_diff_view` is enabled by default
+# disabling the feature flag above is then not needed anymore.
+RSpec.shared_examples 'rendering a single diff version' do |attribute|
+ pending 'allows editing diff settings single_mr_diff_view is enabled' do
+ project = create(:project, :repository)
+ user = project.creator
+ merge_request = create(:merge_request, source_project: project)
+ stub_feature_flags(single_mr_diff_view: true)
+ sign_in(user)
+
+ visit(diffs_project_merge_request_path(project, merge_request))
+
+ expect(page).to have_selector('.js-show-diff-settings')
+ end
+end
diff --git a/spec/support/shared_examples/models/cluster_application_helm_cert_examples.rb b/spec/support/shared_examples/models/cluster_application_helm_cert_examples.rb
index 7ddb3b11c85..1c8c19acc74 100644
--- a/spec/support/shared_examples/models/cluster_application_helm_cert_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_helm_cert_examples.rb
@@ -8,10 +8,6 @@ shared_examples 'cluster application helm specs' do |application_name|
it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::DeleteCommand) }
- it 'has the application name' do
- expect(subject.name).to eq(application.name)
- end
-
it 'has files' do
expect(subject.files).to eq(application.files)
end
diff --git a/spec/support/shared_examples/models/concern/issuable_shared_examples.rb b/spec/support/shared_examples/models/concerns/issuable_shared_examples.rb
index 4978a403324..4978a403324 100644
--- a/spec/support/shared_examples/models/concern/issuable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/issuable_shared_examples.rb
diff --git a/spec/support/shared_examples/models/concerns/redactable_shared_examples.rb b/spec/support/shared_examples/models/concerns/redactable_shared_examples.rb
new file mode 100644
index 00000000000..c5c14901268
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/redactable_shared_examples.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+shared_examples 'model with redactable field' do
+ it 'redacts unsubscribe token' do
+ model[field] = 'some text /sent_notifications/00000000000000000000000000000000/unsubscribe more text'
+
+ model.save!
+
+ expect(model[field]).to eq 'some text /sent_notifications/REDACTED/unsubscribe more text'
+ end
+
+ it 'ignores not hexadecimal tokens' do
+ text = 'some text /sent_notifications/token/unsubscribe more text'
+ model[field] = text
+
+ model.save!
+
+ expect(model[field]).to eq text
+ end
+
+ it 'ignores not matching texts' do
+ text = 'some text /sent_notifications/.*/unsubscribe more text'
+ model[field] = text
+
+ model.save!
+
+ expect(model[field]).to eq text
+ end
+
+ it 'redacts the field when saving the model before creating markdown cache' do
+ model[field] = 'some text /sent_notifications/00000000000000000000000000000000/unsubscribe more text'
+
+ model.save!
+
+ expected = 'some text /sent_notifications/REDACTED/unsubscribe more text'
+ expect(model[field]).to eq expected
+ expect(model["#{field}_html"]).to eq "<p dir=\"auto\">#{expected}</p>"
+ end
+end
diff --git a/spec/support/shared_examples/models/with_uploads_shared_examples.rb b/spec/support/shared_examples/models/with_uploads_shared_examples.rb
index 822836c771e..3d622ba8195 100644
--- a/spec/support/shared_examples/models/with_uploads_shared_examples.rb
+++ b/spec/support/shared_examples/models/with_uploads_shared_examples.rb
@@ -18,7 +18,7 @@ shared_examples_for 'model with uploads' do |supports_fileuploads|
end
end
- context 'with not mounted uploads', :sidekiq, skip: !supports_fileuploads do
+ context 'with not mounted uploads', :sidekiq_might_not_need_inline, skip: !supports_fileuploads do
context 'with local files' do
let!(:uploads) { create_list(:upload, 2, uploader: FileUploader, model: model_object) }
diff --git a/spec/support/shared_examples/quick_actions/issue/zoom_quick_actions_shared_examples.rb b/spec/support/shared_examples/quick_actions/issue/zoom_quick_actions_shared_examples.rb
index b4a8e3fca4d..92bbc4abe77 100644
--- a/spec/support/shared_examples/quick_actions/issue/zoom_quick_actions_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issue/zoom_quick_actions_shared_examples.rb
@@ -2,22 +2,19 @@
shared_examples 'zoom quick actions' do
let(:zoom_link) { 'https://zoom.us/j/123456789' }
+ let(:existing_zoom_link) { 'https://zoom.us/j/123456780' }
let(:invalid_zoom_link) { 'https://invalid-zoom' }
- before do
- issue.update!(description: description)
- end
-
describe '/zoom' do
shared_examples 'skip silently' do
- it 'skip addition silently' do
+ it 'skips addition silently' do
add_note("/zoom #{zoom_link}")
wait_for_requests
expect(page).not_to have_content('Zoom meeting added')
expect(page).not_to have_content('Failed to add a Zoom meeting')
- expect(issue.reload.description).to eq(description)
+ expect(ZoomMeeting.canonical_meeting_url(issue.reload)).not_to eq(zoom_link)
end
end
@@ -28,13 +25,11 @@ shared_examples 'zoom quick actions' do
wait_for_requests
expect(page).to have_content('Zoom meeting added')
- expect(issue.reload.description).to end_with(zoom_link)
+ expect(ZoomMeeting.canonical_meeting_url(issue.reload)).to eq(zoom_link)
end
end
- context 'without issue description' do
- let(:description) { nil }
-
+ context 'without zoom_meetings' do
include_examples 'success'
it 'cannot add invalid zoom link' do
@@ -47,14 +42,18 @@ shared_examples 'zoom quick actions' do
end
end
- context 'with Zoom link not at the end of the issue description' do
- let(:description) { "A link #{zoom_link} not at the end" }
+ context 'with "removed" zoom meeting' do
+ before do
+ create(:zoom_meeting, issue_status: :removed, url: existing_zoom_link, issue: issue)
+ end
include_examples 'success'
end
- context 'with Zoom link at end of the issue description' do
- let(:description) { "Text\n#{zoom_link}" }
+ context 'with "added" zoom meeting' do
+ before do
+ create(:zoom_meeting, issue_status: :added, url: existing_zoom_link, issue: issue)
+ end
include_examples 'skip silently'
end
@@ -62,19 +61,19 @@ shared_examples 'zoom quick actions' do
describe '/remove_zoom' do
shared_examples 'skip silently' do
- it 'skip removal silently' do
+ it 'skips removal silently' do
add_note('/remove_zoom')
wait_for_requests
expect(page).not_to have_content('Zoom meeting removed')
expect(page).not_to have_content('Failed to remove a Zoom meeting')
- expect(issue.reload.description).to eq(description)
+ expect(ZoomMeeting.canonical_meeting_url(issue.reload)).to be_nil
end
end
- context 'with Zoom link in the description' do
- let(:description) { "Text with #{zoom_link}\n\n\n#{zoom_link}" }
+ context 'with added zoom meeting' do
+ let!(:added_zoom_meeting) { create(:zoom_meeting, url: zoom_link, issue: issue, issue_status: :added) }
it 'removes last Zoom link' do
add_note('/remove_zoom')
@@ -82,14 +81,8 @@ shared_examples 'zoom quick actions' do
wait_for_requests
expect(page).to have_content('Zoom meeting removed')
- expect(issue.reload.description).to eq("Text with #{zoom_link}")
+ expect(ZoomMeeting.canonical_meeting_url(issue.reload)).to be_nil
end
end
-
- context 'with a Zoom link not at the end of the description' do
- let(:description) { "A link #{zoom_link} not at the end" }
-
- include_examples 'skip silently'
- end
end
end
diff --git a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
index ac7c17915de..a77d729aa2c 100644
--- a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
@@ -7,7 +7,7 @@ shared_examples 'merge quick action' do
visit project_merge_request_path(project, merge_request)
end
- it 'merges the MR' do
+ it 'merges the MR', :sidekiq_might_not_need_inline do
add_note("/merge")
expect(page).to have_content 'Scheduled to merge this merge request when the pipeline succeeds.'
diff --git a/spec/support/shared_examples/requests/api/discussions.rb b/spec/support/shared_examples/requests/api/discussions.rb
index a36bc2dc9b5..2a5a48f3054 100644
--- a/spec/support/shared_examples/requests/api/discussions.rb
+++ b/spec/support/shared_examples/requests/api/discussions.rb
@@ -117,6 +117,29 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
expect(response).to have_gitlab_http_status(401)
end
+ it 'tracks a Notes::CreateService event' do
+ expect(Gitlab::Tracking).to receive(:event) do |category, action, data|
+ expect(category).to eq('Notes::CreateService')
+ expect(action).to eq('execute')
+ expect(data[:label]).to eq('note')
+ expect(data[:value]).to be_an(Integer)
+ end
+
+ post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user), params: { body: 'hi!' }
+ end
+
+ context 'with notes_create_service_tracking feature flag disabled' do
+ before do
+ stub_feature_flags(notes_create_service_tracking: false)
+ end
+
+ it 'does not track any events' do
+ expect(Gitlab::Tracking).not_to receive(:event)
+
+ post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions"), params: { body: 'hi!' }
+ end
+ end
+
context 'when an admin or owner makes the request' do
it 'accepts the creation date to be set' do
creation_time = 2.weeks.ago
diff --git a/spec/support/shared_examples/requests/api/notes.rb b/spec/support/shared_examples/requests/api/notes.rb
index 354ae7288b1..4ce78d885bc 100644
--- a/spec/support/shared_examples/requests/api/notes.rb
+++ b/spec/support/shared_examples/requests/api/notes.rb
@@ -139,7 +139,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
expect(response).to have_gitlab_http_status(401)
end
- it "creates an activity event when a note is created" do
+ it "creates an activity event when a note is created", :sidekiq_might_not_need_inline do
expect(Event).to receive(:create!)
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: { body: 'hi!' }
diff --git a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
index a2e38cfc60b..c078e982e87 100644
--- a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
+++ b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
@@ -2,8 +2,9 @@
#
# Requires let variables:
# * throttle_setting_prefix: "throttle_authenticated_api", "throttle_authenticated_web", "throttle_protected_paths"
-# * get_args
-# * other_user_get_args
+# * request_method
+# * request_args
+# * other_user_request_args
# * requests_per_period
# * period_in_seconds
# * period
@@ -31,66 +32,66 @@ shared_examples_for 'rate-limited token-authenticated requests' do
it 'rejects requests over the rate limit' do
# At first, allow requests under the rate limit.
requests_per_period.times do
- get(*get_args)
- expect(response).to have_http_status 200
+ make_request(request_args)
+ expect(response).not_to have_http_status 429
end
# the last straw
- expect_rejection { get(*get_args) }
+ expect_rejection { make_request(request_args) }
end
it 'allows requests after throttling and then waiting for the next period' do
requests_per_period.times do
- get(*get_args)
- expect(response).to have_http_status 200
+ make_request(request_args)
+ expect(response).not_to have_http_status 429
end
- expect_rejection { get(*get_args) }
+ expect_rejection { make_request(request_args) }
Timecop.travel(period.from_now) do
requests_per_period.times do
- get(*get_args)
- expect(response).to have_http_status 200
+ make_request(request_args)
+ expect(response).not_to have_http_status 429
end
- expect_rejection { get(*get_args) }
+ expect_rejection { make_request(request_args) }
end
end
it 'counts requests from different users separately, even from the same IP' do
requests_per_period.times do
- get(*get_args)
- expect(response).to have_http_status 200
+ make_request(request_args)
+ expect(response).not_to have_http_status 429
end
# would be over the limit if this wasn't a different user
- get(*other_user_get_args)
- expect(response).to have_http_status 200
+ make_request(other_user_request_args)
+ expect(response).not_to have_http_status 429
end
it 'counts all requests from the same user, even via different IPs' do
requests_per_period.times do
- get(*get_args)
- expect(response).to have_http_status 200
+ make_request(request_args)
+ expect(response).not_to have_http_status 429
end
- expect_any_instance_of(Rack::Attack::Request).to receive(:ip).and_return('1.2.3.4')
+ expect_any_instance_of(Rack::Attack::Request).to receive(:ip).at_least(:once).and_return('1.2.3.4')
- expect_rejection { get(*get_args) }
+ expect_rejection { make_request(request_args) }
end
it 'logs RackAttack info into structured logs' do
requests_per_period.times do
- get(*get_args)
- expect(response).to have_http_status 200
+ make_request(request_args)
+ expect(response).not_to have_http_status 429
end
arguments = {
message: 'Rack_Attack',
env: :throttle,
remote_ip: '127.0.0.1',
- request_method: 'GET',
- path: get_args.first,
+ request_method: request_method,
+ path: request_args.first,
user_id: user.id,
username: user.username,
throttle_type: throttle_types[throttle_setting_prefix]
@@ -98,7 +99,7 @@ shared_examples_for 'rate-limited token-authenticated requests' do
expect(Gitlab::AuthLogger).to receive(:error).with(arguments).once
- expect_rejection { get(*get_args) }
+ expect_rejection { make_request(request_args) }
end
end
@@ -110,17 +111,26 @@ shared_examples_for 'rate-limited token-authenticated requests' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
- get(*get_args)
- expect(response).to have_http_status 200
+ make_request(request_args)
+ expect(response).not_to have_http_status 429
end
end
end
+
+ def make_request(args)
+ if request_method == 'POST'
+ post(*args)
+ else
+ get(*args)
+ end
+ end
end
# Requires let variables:
# * throttle_setting_prefix: "throttle_authenticated_web" or "throttle_protected_paths"
# * user
# * url_that_requires_authentication
+# * request_method
# * requests_per_period
# * period_in_seconds
# * period
@@ -149,68 +159,68 @@ shared_examples_for 'rate-limited web authenticated requests' do
it 'rejects requests over the rate limit' do
# At first, allow requests under the rate limit.
requests_per_period.times do
- get url_that_requires_authentication
- expect(response).to have_http_status 200
+ request_authenticated_web_url
+ expect(response).not_to have_http_status 429
end
# the last straw
- expect_rejection { get url_that_requires_authentication }
+ expect_rejection { request_authenticated_web_url }
end
it 'allows requests after throttling and then waiting for the next period' do
requests_per_period.times do
- get url_that_requires_authentication
- expect(response).to have_http_status 200
+ request_authenticated_web_url
+ expect(response).not_to have_http_status 429
end
- expect_rejection { get url_that_requires_authentication }
+ expect_rejection { request_authenticated_web_url }
Timecop.travel(period.from_now) do
requests_per_period.times do
- get url_that_requires_authentication
- expect(response).to have_http_status 200
+ request_authenticated_web_url
+ expect(response).not_to have_http_status 429
end
- expect_rejection { get url_that_requires_authentication }
+ expect_rejection { request_authenticated_web_url }
end
end
it 'counts requests from different users separately, even from the same IP' do
requests_per_period.times do
- get url_that_requires_authentication
- expect(response).to have_http_status 200
+ request_authenticated_web_url
+ expect(response).not_to have_http_status 429
end
# would be over the limit if this wasn't a different user
login_as(create(:user))
- get url_that_requires_authentication
- expect(response).to have_http_status 200
+ request_authenticated_web_url
+ expect(response).not_to have_http_status 429
end
it 'counts all requests from the same user, even via different IPs' do
requests_per_period.times do
- get url_that_requires_authentication
- expect(response).to have_http_status 200
+ request_authenticated_web_url
+ expect(response).not_to have_http_status 429
end
- expect_any_instance_of(Rack::Attack::Request).to receive(:ip).and_return('1.2.3.4')
+ expect_any_instance_of(Rack::Attack::Request).to receive(:ip).at_least(:once).and_return('1.2.3.4')
- expect_rejection { get url_that_requires_authentication }
+ expect_rejection { request_authenticated_web_url }
end
it 'logs RackAttack info into structured logs' do
requests_per_period.times do
- get url_that_requires_authentication
- expect(response).to have_http_status 200
+ request_authenticated_web_url
+ expect(response).not_to have_http_status 429
end
arguments = {
message: 'Rack_Attack',
env: :throttle,
remote_ip: '127.0.0.1',
- request_method: 'GET',
- path: '/dashboard/snippets',
+ request_method: request_method,
+ path: url_that_requires_authentication,
user_id: user.id,
username: user.username,
throttle_type: throttle_types[throttle_setting_prefix]
@@ -218,7 +228,7 @@ shared_examples_for 'rate-limited web authenticated requests' do
expect(Gitlab::AuthLogger).to receive(:error).with(arguments).once
- get url_that_requires_authentication
+ request_authenticated_web_url
end
end
@@ -230,9 +240,17 @@ shared_examples_for 'rate-limited web authenticated requests' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
- get url_that_requires_authentication
- expect(response).to have_http_status 200
+ request_authenticated_web_url
+ expect(response).not_to have_http_status 429
end
end
end
+
+ def request_authenticated_web_url
+ if request_method == 'POST'
+ post url_that_requires_authentication
+ else
+ get url_that_requires_authentication
+ end
+ end
end
diff --git a/spec/support/shared_examples/serializers/diff_file_entity_examples.rb b/spec/support/shared_examples/serializers/diff_file_entity_examples.rb
index 96cb71be737..d2c269c597c 100644
--- a/spec/support/shared_examples/serializers/diff_file_entity_examples.rb
+++ b/spec/support/shared_examples/serializers/diff_file_entity_examples.rb
@@ -31,14 +31,43 @@ shared_examples 'diff file entity' do
it 'exposes correct attributes' do
expect(subject).to include(:added_lines, :removed_lines,
- :context_lines_path, :highlighted_diff_lines,
- :parallel_diff_lines)
+ :context_lines_path)
end
it 'includes viewer' do
expect(subject[:viewer].with_indifferent_access)
.to match_schema('entities/diff_viewer')
end
+
+ context 'diff files' do
+ context 'when diff_view is parallel' do
+ let(:options) { { diff_view: :parallel } }
+
+ it 'contains only the parallel diff lines', :aggregate_failures do
+ expect(subject).to include(:parallel_diff_lines)
+ expect(subject).not_to include(:highlighted_diff_lines)
+ end
+ end
+
+ context 'when diff_view is parallel' do
+ let(:options) { { diff_view: :inline } }
+
+ it 'contains only the inline diff lines', :aggregate_failures do
+ expect(subject).not_to include(:parallel_diff_lines)
+ expect(subject).to include(:highlighted_diff_lines)
+ end
+ end
+
+ context 'when the `single_mr_diff_view` feature is disabled' do
+ before do
+ stub_feature_flags(single_mr_diff_view: false)
+ end
+
+ it 'contains both kinds of diffs' do
+ expect(subject).to include(:highlighted_diff_lines, :parallel_diff_lines)
+ end
+ end
+ end
end
shared_examples 'diff file discussion entity' do
diff --git a/spec/support/shared_examples/services/error_tracking_service_shared_examples.rb b/spec/support/shared_examples/services/error_tracking_service_shared_examples.rb
new file mode 100644
index 00000000000..83c6d89e560
--- /dev/null
+++ b/spec/support/shared_examples/services/error_tracking_service_shared_examples.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+shared_examples 'error tracking service data not ready' do |service_call|
+ context "when #{service_call} returns nil" do
+ before do
+ expect(error_tracking_setting)
+ .to receive(service_call).and_return(nil)
+ end
+
+ it 'result is not ready' do
+ expect(result).to eq(
+ status: :error, http_status: :no_content, message: 'Not ready. Try again later')
+ end
+ end
+end
+
+shared_examples 'error tracking service sentry error handling' do |service_call|
+ context "when #{service_call} returns error" do
+ before do
+ allow(error_tracking_setting)
+ .to receive(service_call)
+ .and_return(
+ error: 'Sentry response status code: 401',
+ error_type: ErrorTracking::ProjectErrorTrackingSetting::SENTRY_API_ERROR_TYPE_NON_20X_RESPONSE
+ )
+ end
+
+ it 'returns the error' do
+ expect(result).to eq(
+ status: :error,
+ http_status: :bad_request,
+ message: 'Sentry response status code: 401'
+ )
+ end
+ end
+end
+
+shared_examples 'error tracking service http status handling' do |service_call|
+ context "when #{service_call} returns error with http_status" do
+ before do
+ allow(error_tracking_setting)
+ .to receive(service_call)
+ .and_return(
+ error: 'Sentry API response is missing keys. key not found: "id"',
+ error_type: ErrorTracking::ProjectErrorTrackingSetting::SENTRY_API_ERROR_TYPE_MISSING_KEYS
+ )
+ end
+
+ it 'returns the error with correct http_status' do
+ expect(result).to eq(
+ status: :error,
+ http_status: :internal_server_error,
+ message: 'Sentry API response is missing keys. key not found: "id"'
+ )
+ end
+ end
+end
+
+shared_examples 'error tracking service unauthorized user' do
+ context 'with unauthorized user' do
+ let(:unauthorized_user) { create(:user) }
+
+ subject { described_class.new(project, unauthorized_user) }
+
+ it 'returns error' do
+ result = subject.execute
+
+ expect(result).to include(
+ status: :error,
+ message: 'Access denied',
+ http_status: :unauthorized
+ )
+ end
+ end
+end
+
+shared_examples 'error tracking service disabled' do
+ context 'with error tracking disabled' do
+ before do
+ error_tracking_setting.enabled = false
+ end
+
+ it 'raises error' do
+ result = subject.execute
+
+ expect(result).to include(status: :error, message: 'Error Tracking is not enabled')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/updating_mentions_shared_examples.rb b/spec/support/shared_examples/updating_mentions_shared_examples.rb
index 9a8f8012762..84f6c4d136a 100644
--- a/spec/support/shared_examples/updating_mentions_shared_examples.rb
+++ b/spec/support/shared_examples/updating_mentions_shared_examples.rb
@@ -27,7 +27,7 @@ RSpec.shared_examples 'updating mentions' do |service_class|
update_mentionable(title: "For #{mentioned_user.to_reference}")
end
- it 'emails only the newly-mentioned user' do
+ it 'emails only the newly-mentioned user', :sidekiq_might_not_need_inline do
should_only_email(mentioned_user)
end
end
@@ -37,7 +37,7 @@ RSpec.shared_examples 'updating mentions' do |service_class|
update_mentionable(description: "For #{mentioned_user.to_reference}")
end
- it 'emails only the newly-mentioned user' do
+ it 'emails only the newly-mentioned user', :sidekiq_might_not_need_inline do
should_only_email(mentioned_user)
end
end
@@ -51,16 +51,32 @@ RSpec.shared_examples 'updating mentions' do |service_class|
)
end
- it 'emails group members' do
+ it 'emails group members', :sidekiq_might_not_need_inline do
should_email(mentioned_user)
should_email(group_member1)
should_email(group_member2)
end
end
+ shared_examples 'updating attribute with existing group mention' do |attribute|
+ before do
+ mentionable.update!({ attribute => "FYI: #{group.to_reference}" })
+ end
+
+ it 'creates todos for only newly mentioned users' do
+ expect do
+ update_mentionable(
+ { attribute => "For #{group.to_reference}, cc: #{mentioned_user.to_reference}" }
+ )
+ end.to change { Todo.count }.by(1)
+ end
+ end
+
context 'when group is public' do
it_behaves_like 'updating attribute with allowed mentions', :title
it_behaves_like 'updating attribute with allowed mentions', :description
+ it_behaves_like 'updating attribute with existing group mention', :title
+ it_behaves_like 'updating attribute with existing group mention', :description
end
context 'when the group is private' do
@@ -70,6 +86,8 @@ RSpec.shared_examples 'updating mentions' do |service_class|
it_behaves_like 'updating attribute with allowed mentions', :title
it_behaves_like 'updating attribute with allowed mentions', :description
+ it_behaves_like 'updating attribute with existing group mention', :title
+ it_behaves_like 'updating attribute with existing group mention', :description
end
end
@@ -81,7 +99,7 @@ RSpec.shared_examples 'updating mentions' do |service_class|
)
end
- it 'emails mentioned user' do
+ it 'emails mentioned user', :sidekiq_might_not_need_inline do
should_only_email(mentioned_user)
end
end
diff --git a/spec/support/sidekiq.rb b/spec/support/sidekiq.rb
index 585c458a64e..246efedc7e5 100644
--- a/spec/support/sidekiq.rb
+++ b/spec/support/sidekiq.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'sidekiq/testing/inline'
+require 'sidekiq/testing'
# If Sidekiq::Testing.inline! is used, SQL transactions done inside
# Sidekiq worker are included in the SQL query limit (in a real
@@ -27,7 +27,9 @@ Sidekiq::Testing.server_middleware do |chain|
end
RSpec.configure do |config|
- config.after(:each, :sidekiq) do
+ config.around(:each, :sidekiq) do |example|
+ Sidekiq::Worker.clear_all
+ example.run
Sidekiq::Worker.clear_all
end
@@ -36,4 +38,19 @@ RSpec.configure do |config|
connection.redis.flushdb
end
end
+
+ # As we'll review the examples with this tag, we should either:
+ # - fix the example to not require Sidekiq inline mode (and remove this tag)
+ # - explicitly keep the inline mode and change the tag for `:sidekiq_inline` instead
+ config.around(:example, :sidekiq_might_not_need_inline) do |example|
+ Sidekiq::Worker.clear_all
+ Sidekiq::Testing.inline! { example.run }
+ Sidekiq::Worker.clear_all
+ end
+
+ config.around(:example, :sidekiq_inline) do |example|
+ Sidekiq::Worker.clear_all
+ Sidekiq::Testing.inline! { example.run }
+ Sidekiq::Worker.clear_all
+ end
end
diff --git a/spec/support/unpack-gitlab-git-test b/spec/support/unpack-gitlab-git-test
index d5b4912457d..5d5f1b7d082 100755
--- a/spec/support/unpack-gitlab-git-test
+++ b/spec/support/unpack-gitlab-git-test
@@ -1,10 +1,12 @@
#!/usr/bin/env ruby
+# frozen_string_literal: true
+
require 'fileutils'
-REPO = 'spec/support/gitlab-git-test.git'.freeze
+REPO = 'spec/support/gitlab-git-test.git'
PACK_DIR = REPO + '/objects/pack'
GIT = %W[git --git-dir=#{REPO}].freeze
-BASE_PACK = 'pack-691247af2a6acb0b63b73ac0cb90540e93614043'.freeze
+BASE_PACK = 'pack-691247af2a6acb0b63b73ac0cb90540e93614043'
def main
unpack
diff --git a/spec/tasks/gitlab/shell_rake_spec.rb b/spec/tasks/gitlab/shell_rake_spec.rb
index abad16be580..08b3fea0c80 100644
--- a/spec/tasks/gitlab/shell_rake_spec.rb
+++ b/spec/tasks/gitlab/shell_rake_spec.rb
@@ -17,7 +17,7 @@ describe 'gitlab:shell rake tasks' do
expect_any_instance_of(Gitlab::TaskHelpers).to receive(:checkout_or_clone_version)
allow(Kernel).to receive(:system).with('bin/install', *storages).and_return(true)
- allow(Kernel).to receive(:system).with('bin/compile').and_return(true)
+ allow(Kernel).to receive(:system).with('make', 'build').and_return(true)
run_rake_task('gitlab:shell:install')
end
diff --git a/spec/tasks/gitlab/task_helpers_spec.rb b/spec/tasks/gitlab/task_helpers_spec.rb
index 4b4f7d7c956..4546d3bdfaf 100644
--- a/spec/tasks/gitlab/task_helpers_spec.rb
+++ b/spec/tasks/gitlab/task_helpers_spec.rb
@@ -20,22 +20,12 @@ describe Gitlab::TaskHelpers do
end
it 'checkout the version and reset to it' do
+ expect(subject).to receive(:get_version).with(version).and_call_original
expect(subject).to receive(:checkout_version).with(tag, clone_path)
subject.checkout_or_clone_version(version: version, repo: repo, target_dir: clone_path)
end
- context 'with a branch version' do
- let(:version) { '=branch_name' }
- let(:branch) { 'branch_name' }
-
- it 'checkout the version and reset to it with a branch name' do
- expect(subject).to receive(:checkout_version).with(branch, clone_path)
-
- subject.checkout_or_clone_version(version: version, repo: repo, target_dir: clone_path)
- end
- end
-
context "target_dir doesn't exist" do
it 'clones the repo' do
expect(subject).to receive(:clone_repo).with(repo, clone_path)
@@ -96,4 +86,19 @@ describe Gitlab::TaskHelpers do
expect { subject.run_command!(['bash', '-c', 'exit 1']) }.to raise_error Gitlab::TaskFailedError
end
end
+
+ describe '#get_version' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:version, :result) do
+ '1.1.1' | 'v1.1.1'
+ 'master' | 'master'
+ '12.4.0-rc7' | 'v12.4.0-rc7'
+ '594c3ea3e0e5540e5915bd1c49713a0381459dd6' | '594c3ea3e0e5540e5915bd1c49713a0381459dd6'
+ end
+
+ with_them do
+ it { expect(subject.get_version(version)).to eq(result) }
+ end
+ end
end
diff --git a/spec/uploaders/workers/object_storage/background_move_worker_spec.rb b/spec/uploaders/workers/object_storage/background_move_worker_spec.rb
index cf4872d6904..38b70d33993 100644
--- a/spec/uploaders/workers/object_storage/background_move_worker_spec.rb
+++ b/spec/uploaders/workers/object_storage/background_move_worker_spec.rb
@@ -22,7 +22,7 @@ describe ObjectStorage::BackgroundMoveWorker do
stub_lfs_object_storage(background_upload: true)
end
- it 'uploads object to storage' do
+ it 'uploads object to storage', :sidekiq_might_not_need_inline do
expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
end
@@ -65,7 +65,7 @@ describe ObjectStorage::BackgroundMoveWorker do
stub_artifacts_object_storage(background_upload: true)
end
- it "migrates file to remote storage" do
+ it "migrates file to remote storage", :sidekiq_might_not_need_inline do
perform
expect(artifact.reload.file_store).to eq(remote)
@@ -91,7 +91,7 @@ describe ObjectStorage::BackgroundMoveWorker do
let(:subject_class) { project.class }
let(:subject_id) { project.id }
- it "migrates file to remote storage" do
+ it "migrates file to remote storage", :sidekiq_might_not_need_inline do
perform
project.reload
BatchLoader::Executor.clear_current
@@ -104,7 +104,7 @@ describe ObjectStorage::BackgroundMoveWorker do
let(:subject_class) { Upload }
let(:subject_id) { project.avatar.upload.id }
- it "migrates file to remote storage" do
+ it "migrates file to remote storage", :sidekiq_might_not_need_inline do
perform
expect(project.reload.avatar).not_to be_file_storage
diff --git a/spec/views/admin/application_settings/integrations.html.haml_spec.rb b/spec/views/admin/application_settings/integrations.html.haml_spec.rb
new file mode 100644
index 00000000000..392d43ef2d4
--- /dev/null
+++ b/spec/views/admin/application_settings/integrations.html.haml_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'admin/application_settings/integrations.html.haml' do
+ let(:app_settings) { build(:application_setting) }
+
+ describe 'sourcegraph integration' do
+ let(:sourcegraph_flag) { true }
+
+ before do
+ assign(:application_setting, app_settings)
+ allow(Gitlab::Sourcegraph).to receive(:feature_available?).and_return(sourcegraph_flag)
+ end
+
+ context 'when sourcegraph feature is enabled' do
+ it 'show the form' do
+ render
+
+ expect(rendered).to have_field('application_setting_sourcegraph_enabled')
+ end
+ end
+
+ context 'when sourcegraph feature is disabled' do
+ let(:sourcegraph_flag) { false }
+
+ it 'show the form' do
+ render
+
+ expect(rendered).not_to have_field('application_setting_sourcegraph_enabled')
+ end
+ end
+ end
+end
diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb
new file mode 100644
index 00000000000..66afc2af7ce
--- /dev/null
+++ b/spec/views/devise/sessions/new.html.haml_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'devise/sessions/new' do
+ describe 'ldap' do
+ include LdapHelpers
+
+ let(:server) { { provider_name: 'ldapmain', label: 'LDAP' }.with_indifferent_access }
+
+ before do
+ enable_ldap
+ stub_devise
+ disable_captcha
+ disable_sign_up
+ disable_other_signin_methods
+
+ allow(view).to receive(:experiment_enabled?).and_return(false)
+ end
+
+ it 'is shown when enabled' do
+ render
+
+ expect(rendered).to have_selector('.new-session-tabs')
+ expect(rendered).to have_selector('[data-qa-selector="ldap_tab"]')
+ expect(rendered).to have_field('LDAP Username')
+ end
+
+ it 'is not shown when LDAP sign in is disabled' do
+ disable_ldap_sign_in
+
+ render
+
+ expect(rendered).to have_content('No authentication methods configured')
+ expect(rendered).not_to have_selector('[data-qa-selector="ldap_tab"]')
+ expect(rendered).not_to have_field('LDAP Username')
+ end
+ end
+
+ def disable_other_signin_methods
+ allow(view).to receive(:password_authentication_enabled_for_web?).and_return(false)
+ allow(view).to receive(:omniauth_enabled?).and_return(false)
+ end
+
+ def disable_sign_up
+ allow(view).to receive(:allow_signup?).and_return(false)
+ end
+
+ def stub_devise
+ allow(view).to receive(:devise_mapping).and_return(Devise.mappings[:user])
+ allow(view).to receive(:resource).and_return(spy)
+ allow(view).to receive(:resource_name).and_return(:user)
+ end
+
+ def enable_ldap
+ stub_ldap_setting(enabled: true)
+ assign(:ldap_servers, [server])
+ allow(view).to receive(:form_based_providers).and_return([:ldapmain])
+ allow(view).to receive(:omniauth_callback_path).with(:user, 'ldapmain').and_return('/ldapmain')
+ end
+
+ def disable_ldap_sign_in
+ allow(view).to receive(:ldap_sign_in_enabled?).and_return(false)
+ assign(:ldap_servers, [])
+ end
+
+ def disable_captcha
+ allow(view).to receive(:captcha_enabled?).and_return(false)
+ allow(view).to receive(:captcha_on_login_required?).and_return(false)
+ end
+end
diff --git a/spec/views/layouts/_head.html.haml_spec.rb b/spec/views/layouts/_head.html.haml_spec.rb
index e9b3334fffc..f181e18e53d 100644
--- a/spec/views/layouts/_head.html.haml_spec.rb
+++ b/spec/views/layouts/_head.html.haml_spec.rb
@@ -84,7 +84,7 @@ describe 'layouts/_head' do
allow(Gitlab::CurrentSettings).to receive(:snowplow_collector_hostname).and_return('www.snow.plow')
end
- it 'add a snowplow script tag with asset host' do
+ it 'adds a snowplow script tag with asset host' do
render
expect(rendered).to match('http://test.host/assets/snowplow/')
expect(rendered).to match('window.snowplow')
diff --git a/spec/views/profiles/preferences/show.html.haml_spec.rb b/spec/views/profiles/preferences/show.html.haml_spec.rb
new file mode 100644
index 00000000000..52933c42621
--- /dev/null
+++ b/spec/views/profiles/preferences/show.html.haml_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'profiles/preferences/show' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { build(:user) }
+
+ before do
+ assign(:user, user)
+ allow(controller).to receive(:current_user).and_return(user)
+ end
+
+ context 'sourcegraph' do
+ def have_sourcegraph_field(*args)
+ have_field('user_sourcegraph_enabled', *args)
+ end
+
+ def have_integrations_section
+ have_css('.profile-settings-sidebar', { text: 'Integrations' })
+ end
+
+ before do
+ # Can't use stub_feature_flags because we use Feature.get to check if conditinally applied
+ Feature.get(:sourcegraph).enable sourcegraph_feature
+ stub_application_setting(sourcegraph_enabled: sourcegraph_enabled)
+ end
+
+ context 'when not fully enabled' do
+ where(:feature, :admin_enabled) do
+ false | false
+ false | true
+ true | false
+ end
+
+ with_them do
+ let(:sourcegraph_feature) { feature }
+ let(:sourcegraph_enabled) { admin_enabled }
+
+ before do
+ render
+ end
+
+ it 'does not display sourcegraph field' do
+ expect(rendered).not_to have_sourcegraph_field
+ end
+
+ it 'does not display integrations settings' do
+ expect(rendered).not_to have_integrations_section
+ end
+ end
+ end
+
+ context 'when fully enabled' do
+ let(:sourcegraph_feature) { true }
+ let(:sourcegraph_enabled) { true }
+
+ before do
+ render
+ end
+
+ it 'displays the sourcegraph field' do
+ expect(rendered).to have_sourcegraph_field
+ end
+
+ it 'displays the integrations section' do
+ expect(rendered).to have_integrations_section
+ end
+ end
+ end
+end
diff --git a/spec/views/profiles/show.html.haml_spec.rb b/spec/views/profiles/show.html.haml_spec.rb
index 592b3a56ba3..14e6feed3ab 100644
--- a/spec/views/profiles/show.html.haml_spec.rb
+++ b/spec/views/profiles/show.html.haml_spec.rb
@@ -8,6 +8,7 @@ describe 'profiles/show' do
before do
assign(:user, user)
allow(controller).to receive(:current_user).and_return(user)
+ allow(view).to receive(:experiment_enabled?)
end
context 'when the profile page is opened' do
diff --git a/spec/views/projects/clusters/clusters/gcp/_form.html.haml_spec.rb b/spec/views/projects/clusters/clusters/gcp/_form.html.haml_spec.rb
new file mode 100644
index 00000000000..1cb2f9a4301
--- /dev/null
+++ b/spec/views/projects/clusters/clusters/gcp/_form.html.haml_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'clusters/clusters/gcp/_form' do
+ let(:admin) { create(:admin) }
+ let(:environment) { create(:environment) }
+ let(:gcp_cluster) { create(:cluster, :provided_by_gcp) }
+ let(:clusterable) { ClusterablePresenter.fabricate(environment.project, current_user: admin) }
+
+ before do
+ assign(:environment, environment)
+ assign(:gcp_cluster, gcp_cluster)
+ allow(view).to receive(:clusterable).and_return(clusterable)
+ allow(view).to receive(:url_for).and_return('#')
+ allow(view).to receive(:token_in_session).and_return('')
+ end
+
+ context 'with all feature flags enabled' do
+ it 'has a cloud run checkbox' do
+ render
+
+ expect(rendered).to have_selector("input[id='cluster_provider_gcp_attributes_cloud_run']")
+ end
+ end
+
+ context 'with cloud run feature flag disabled' do
+ before do
+ stub_feature_flags(create_cloud_run_clusters: false)
+ end
+
+ it 'does not have a cloud run checkbox' do
+ render
+
+ expect(rendered).not_to have_selector("input[id='cluster_provider_gcp_attributes_cloud_run']")
+ end
+ end
+end
diff --git a/spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb b/spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb
index 54ec4f32856..9168bc8e833 100644
--- a/spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb
+++ b/spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb
@@ -48,7 +48,7 @@ describe 'projects/deployments/_confirm_rollback_modal' do
render
expect(rendered).to have_selector('h4', text: "Rollback environment #{environment.name}?")
- expect(rendered).to have_selector('p', text: "This action will run the job defined by staging for commit #{deployment.short_sha}, putting the environment in a previous version. You can revert it by re-deploying the latest version of your application. Are you sure you want to continue?")
+ expect(rendered).to have_selector('p', text: "This action will run the job defined by #{environment.name} for commit #{deployment.short_sha}, putting the environment in a previous version. You can revert it by re-deploying the latest version of your application. Are you sure you want to continue?")
expect(rendered).to have_selector('a.btn-danger', text: 'Rollback')
end
diff --git a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
index 71d74b06f85..755a40a7e4c 100644
--- a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'projects/merge_requests/_commits.html.haml' do
+describe 'projects/merge_requests/_commits.html.haml', :sidekiq_might_not_need_inline do
include Devise::Test::ControllerHelpers
include ProjectForksHelper
diff --git a/spec/views/projects/pages_domains/show.html.haml_spec.rb b/spec/views/projects/pages_domains/show.html.haml_spec.rb
index ba0544a49b0..331bfe63f28 100644
--- a/spec/views/projects/pages_domains/show.html.haml_spec.rb
+++ b/spec/views/projects/pages_domains/show.html.haml_spec.rb
@@ -30,39 +30,5 @@ describe 'projects/pages_domains/show' do
expect(rendered).to have_content("GitLab is obtaining a Let's Encrypt SSL certificate for this domain. This process can take some time. Please try again later.")
end
end
-
- context 'when certificate is present' do
- let(:domain) { create(:pages_domain, :letsencrypt, project: project) }
-
- it 'shows certificate info' do
- render
-
- # test just a random part of cert represenations(X509v3 Subject Key Identifier:)
- expect(rendered).to have_content("C6:5F:56:4B:10:69:AC:1D:33:D2:26:C9:B3:7A:D7:12:4D:3E:F7:90")
- end
- end
- end
-
- context 'when auto_ssl is disabled' do
- context 'when certificate is present' do
- let(:domain) { create(:pages_domain, project: project) }
-
- it 'shows certificate info' do
- render
-
- # test just a random part of cert represenations(X509v3 Subject Key Identifier:)
- expect(rendered).to have_content("C6:5F:56:4B:10:69:AC:1D:33:D2:26:C9:B3:7A:D7:12:4D:3E:F7:90")
- end
- end
-
- context 'when certificate is absent' do
- let(:domain) { create(:pages_domain, :without_certificate, :without_key, project: project) }
-
- it 'shows missing certificate' do
- render
-
- expect(rendered).to have_content("missing")
- end
- end
end
end
diff --git a/spec/views/projects/show.html.haml_spec.rb b/spec/views/projects/show.html.haml_spec.rb
new file mode 100644
index 00000000000..4f5f0f0285c
--- /dev/null
+++ b/spec/views/projects/show.html.haml_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'projects/show' do
+ include Devise::Test::ControllerHelpers
+
+ let(:user) { create(:admin) }
+ let(:project) { create(:project, :repository) }
+
+ before do
+ presented_project = project.present(current_user: user)
+
+ allow(presented_project).to receive(:default_view).and_return('customize_workflow')
+ allow(controller).to receive(:current_user).and_return(user)
+
+ assign(:project, presented_project)
+ end
+
+ context 'commit signatures' do
+ context 'with vue tree view enabled' do
+ it 'are not rendered via js-signature-container' do
+ render
+
+ expect(rendered).not_to have_css('.js-signature-container')
+ end
+ end
+
+ context 'with vue tree view disabled' do
+ before do
+ stub_feature_flags(vue_file_list: false)
+ end
+
+ it 'rendered via js-signature-container' do
+ render
+
+ expect(rendered).to have_css('.js-signature-container')
+ end
+ end
+ end
+end
diff --git a/spec/views/projects/tree/_tree_header.html.haml_spec.rb b/spec/views/projects/tree/_tree_header.html.haml_spec.rb
index 4b71ea9ffe3..caf8c4d1969 100644
--- a/spec/views/projects/tree/_tree_header.html.haml_spec.rb
+++ b/spec/views/projects/tree/_tree_header.html.haml_spec.rb
@@ -8,6 +8,8 @@ describe 'projects/tree/_tree_header' do
let(:repository) { project.repository }
before do
+ stub_feature_flags(vue_file_list: false)
+
assign(:project, project)
assign(:repository, repository)
assign(:id, File.join('master', ''))
diff --git a/spec/views/projects/tree/show.html.haml_spec.rb b/spec/views/projects/tree/show.html.haml_spec.rb
index 960cf42a793..8c6b229247d 100644
--- a/spec/views/projects/tree/show.html.haml_spec.rb
+++ b/spec/views/projects/tree/show.html.haml_spec.rb
@@ -7,10 +7,12 @@ describe 'projects/tree/show' do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
+ let(:ref) { 'master' }
+ let(:commit) { repository.commit(ref) }
+ let(:path) { '' }
+ let(:tree) { repository.tree(commit.id, path) }
before do
- stub_feature_flags(vue_file_list: false)
-
assign(:project, project)
assign(:repository, repository)
assign(:lfs_blob_ids, [])
@@ -19,26 +21,44 @@ describe 'projects/tree/show' do
allow(view).to receive(:can_collaborate_with_project?).and_return(true)
allow(view).to receive_message_chain('user_access.can_push_to_branch?').and_return(true)
allow(view).to receive(:current_application_settings).and_return(Gitlab::CurrentSettings.current_application_settings)
+ allow(view).to receive(:current_user).and_return(project.creator)
+
+ assign(:id, File.join(ref, path))
+ assign(:ref, ref)
+ assign(:path, path)
+ assign(:last_commit, commit)
+ assign(:tree, tree)
end
context 'for branch names ending on .json' do
let(:ref) { 'ends-with.json' }
- let(:commit) { repository.commit(ref) }
- let(:path) { '' }
- let(:tree) { repository.tree(commit.id, path) }
-
- before do
- assign(:id, File.join(ref, path))
- assign(:ref, ref)
- assign(:path, path)
- assign(:last_commit, commit)
- assign(:tree, tree)
- end
it 'displays correctly' do
render
+
expect(rendered).to have_css('.js-project-refs-dropdown .dropdown-toggle-text', text: ref)
- expect(rendered).to have_css('.readme-holder')
+ end
+ end
+
+ context 'commit signatures' do
+ context 'with vue tree view disabled' do
+ before do
+ stub_feature_flags(vue_file_list: false)
+ end
+
+ it 'rendered via js-signature-container' do
+ render
+
+ expect(rendered).to have_css('.js-signature-container')
+ end
+ end
+
+ context 'with vue tree view enabled' do
+ it 'are not rendered via js-signature-container' do
+ render
+
+ expect(rendered).not_to have_css('.js-signature-container')
+ end
end
end
end
diff --git a/spec/workers/cluster_provision_worker_spec.rb b/spec/workers/cluster_provision_worker_spec.rb
index 3f69962f25d..608639331fd 100644
--- a/spec/workers/cluster_provision_worker_spec.rb
+++ b/spec/workers/cluster_provision_worker_spec.rb
@@ -9,7 +9,18 @@ describe ClusterProvisionWorker do
let(:provider) { create(:cluster_provider_gcp, :scheduled) }
it 'provision a cluster' do
- expect_any_instance_of(Clusters::Gcp::ProvisionService).to receive(:execute)
+ expect_any_instance_of(Clusters::Gcp::ProvisionService).to receive(:execute).with(provider)
+
+ described_class.new.perform(cluster.id)
+ end
+ end
+
+ context 'when provider type is aws' do
+ let(:cluster) { create(:cluster, provider_type: :aws, provider_aws: provider) }
+ let(:provider) { create(:cluster_provider_aws, :scheduled) }
+
+ it 'provision a cluster' do
+ expect_any_instance_of(Clusters::Aws::ProvisionService).to receive(:execute).with(provider)
described_class.new.perform(cluster.id)
end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index b7ba4d61723..5ceb54eb2d5 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -21,8 +21,8 @@ describe 'Every Sidekiq worker' do
missing_from_file = worker_queues - file_worker_queues
expect(missing_from_file).to be_empty, "expected #{missing_from_file.to_a.inspect} to be in Gitlab::SidekiqConfig::QUEUE_CONFIG_PATHS"
- unncessarily_in_file = file_worker_queues - worker_queues
- expect(unncessarily_in_file).to be_empty, "expected #{unncessarily_in_file.to_a.inspect} not to be in Gitlab::SidekiqConfig::QUEUE_CONFIG_PATHS"
+ unnecessarily_in_file = file_worker_queues - worker_queues
+ expect(unnecessarily_in_file).to be_empty, "expected #{unnecessarily_in_file.to_a.inspect} not to be in Gitlab::SidekiqConfig::QUEUE_CONFIG_PATHS"
end
it 'has its queue or namespace in config/sidekiq_queues.yml', :aggregate_failures do
@@ -42,7 +42,7 @@ describe 'Every Sidekiq worker' do
end
# All Sidekiq worker classes should declare a valid `feature_category`
- # or explicitely be excluded with the `feature_category_not_owned!` annotation.
+ # or explicitly be excluded with the `feature_category_not_owned!` annotation.
# Please see doc/development/sidekiq_style_guide.md#Feature-Categorization for more details.
it 'has a feature_category or feature_category_not_owned! attribute', :aggregate_failures do
Gitlab::SidekiqConfig.workers.each do |worker|
@@ -62,5 +62,36 @@ describe 'Every Sidekiq worker' do
expect(feature_categories).to include(worker.get_feature_category), "expected #{worker.inspect} to declare a valid feature_category, but got #{worker.get_feature_category}"
end
end
+
+ # Memory-bound workers are very expensive to run, since they need to run on nodes with very low
+ # concurrency, so that each job can consume a large amounts of memory. For this reason, on
+ # GitLab.com, when a large number of memory-bound jobs arrive at once, we let them queue up
+ # rather than scaling the hardware to meet the SLO. For this reason, memory-bound,
+ # latency-sensitive jobs are explicitly discouraged and disabled.
+ it 'is (exclusively) memory-bound or latency-sentitive, not both', :aggregate_failures do
+ latency_sensitive_workers = Gitlab::SidekiqConfig.workers
+ .select(&:latency_sensitive_worker?)
+
+ latency_sensitive_workers.each do |worker|
+ expect(worker.get_worker_resource_boundary).not_to eq(:memory), "#{worker.inspect} cannot be both memory-bound and latency sensitive"
+ end
+ end
+
+ # In high traffic installations, such as GitLab.com, `latency_sensitive` workers run in a
+ # dedicated fleet. In order to ensure short queue times, `latency_sensitive` jobs have strict
+ # SLOs in order to ensure throughput. However, when a worker depends on an external service,
+ # such as a user's k8s cluster or a third-party internet service, we cannot guarantee latency,
+ # and therefore throughput. An outage to an 3rd party service could therefore impact throughput
+ # on other latency_sensitive jobs, leading to degradation through the GitLab application.
+ # Please see doc/development/sidekiq_style_guide.md#Jobs-with-External-Dependencies for more
+ # details.
+ it 'has (exclusively) external dependencies or is latency-sentitive, not both', :aggregate_failures do
+ latency_sensitive_workers = Gitlab::SidekiqConfig.workers
+ .select(&:latency_sensitive_worker?)
+
+ latency_sensitive_workers.each do |worker|
+ expect(worker.worker_has_external_dependencies?).to be_falsey, "#{worker.inspect} cannot have both external dependencies and be latency sensitive"
+ end
+ end
end
end
diff --git a/spec/workers/expire_build_artifacts_worker_spec.rb b/spec/workers/expire_build_artifacts_worker_spec.rb
index 74d6b5605d1..0a0aea838d2 100644
--- a/spec/workers/expire_build_artifacts_worker_spec.rb
+++ b/spec/workers/expire_build_artifacts_worker_spec.rb
@@ -3,62 +3,11 @@
require 'spec_helper'
describe ExpireBuildArtifactsWorker do
- include RepoHelpers
-
let(:worker) { described_class.new }
- before do
- Sidekiq::Worker.clear_all
- end
-
describe '#perform' do
- before do
- stub_feature_flags(ci_new_expire_job_artifacts_service: false)
- build
- end
-
- subject! do
- Sidekiq::Testing.fake! { worker.perform }
- end
-
- context 'with expired artifacts' do
- let(:build) { create(:ci_build, :artifacts, artifacts_expire_at: Time.now - 7.days) }
-
- it 'enqueues that build' do
- expect(jobs_enqueued.size).to eq(1)
- expect(jobs_enqueued[0]["args"]).to eq([build.id])
- end
- end
-
- context 'with not yet expired artifacts' do
- let(:build) { create(:ci_build, :artifacts, artifacts_expire_at: Time.now + 7.days) }
-
- it 'does not enqueue that build' do
- expect(jobs_enqueued.size).to eq(0)
- end
- end
-
- context 'without expire date' do
- let(:build) { create(:ci_build, :artifacts) }
-
- it 'does not enqueue that build' do
- expect(jobs_enqueued.size).to eq(0)
- end
- end
-
- def jobs_enqueued
- Sidekiq::Queues.jobs_by_worker['ExpireBuildInstanceArtifactsWorker']
- end
- end
-
- describe '#perform with ci_new_expire_job_artifacts_service feature flag' do
- before do
- stub_feature_flags(ci_new_expire_job_artifacts_service: true)
- end
-
it 'executes a service' do
expect_any_instance_of(Ci::DestroyExpiredJobArtifactsService).to receive(:execute)
- expect(ExpireBuildInstanceArtifactsWorker).not_to receive(:bulk_perform_async)
worker.perform
end
diff --git a/spec/workers/group_export_worker_spec.rb b/spec/workers/group_export_worker_spec.rb
new file mode 100644
index 00000000000..4aa85d2b381
--- /dev/null
+++ b/spec/workers/group_export_worker_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GroupExportWorker do
+ let!(:user) { create(:user) }
+ let!(:group) { create(:group) }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ context 'when it succeeds' do
+ it 'calls the ExportService' do
+ expect_any_instance_of(::Groups::ImportExport::ExportService).to receive(:execute)
+
+ subject.perform(user.id, group.id, {})
+ end
+ end
+
+ context 'when it fails' do
+ it 'raises an exception when params are invalid' do
+ expect_any_instance_of(::Groups::ImportExport::ExportService).not_to receive(:execute)
+
+ expect { subject.perform(1234, group.id, {}) }.to raise_exception(ActiveRecord::RecordNotFound)
+ expect { subject.perform(user.id, 1234, {}) }.to raise_exception(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+end
diff --git a/spec/workers/hashed_storage/migrator_worker_spec.rb b/spec/workers/hashed_storage/migrator_worker_spec.rb
index 12c1a26104e..9180da87058 100644
--- a/spec/workers/hashed_storage/migrator_worker_spec.rb
+++ b/spec/workers/hashed_storage/migrator_worker_spec.rb
@@ -15,7 +15,7 @@ describe HashedStorage::MigratorWorker do
worker.perform(5, 10)
end
- it 'migrates projects in the specified range' do
+ it 'migrates projects in the specified range', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
worker.perform(ids.min, ids.max)
end
diff --git a/spec/workers/hashed_storage/rollbacker_worker_spec.rb b/spec/workers/hashed_storage/rollbacker_worker_spec.rb
index 5fcb1adf9ae..3ca2601df0f 100644
--- a/spec/workers/hashed_storage/rollbacker_worker_spec.rb
+++ b/spec/workers/hashed_storage/rollbacker_worker_spec.rb
@@ -15,7 +15,7 @@ describe HashedStorage::RollbackerWorker do
worker.perform(5, 10)
end
- it 'rollsback projects in the specified range' do
+ it 'rollsback projects in the specified range', :sidekiq_might_not_need_inline do
perform_enqueued_jobs do
worker.perform(ids.min, ids.max)
end
diff --git a/spec/workers/merge_worker_spec.rb b/spec/workers/merge_worker_spec.rb
index 138a99abde6..dc98c9836fa 100644
--- a/spec/workers/merge_worker_spec.rb
+++ b/spec/workers/merge_worker_spec.rb
@@ -20,6 +20,7 @@ describe MergeWorker do
described_class.new.perform(
merge_request.id, merge_request.author_id,
commit_message: 'wow such merge',
+ sha: merge_request.diff_head_sha,
should_remove_source_branch: true)
merge_request.reload
diff --git a/spec/workers/new_note_worker_spec.rb b/spec/workers/new_note_worker_spec.rb
index 2966a201a62..ae62237960a 100644
--- a/spec/workers/new_note_worker_spec.rb
+++ b/spec/workers/new_note_worker_spec.rb
@@ -7,16 +7,17 @@ describe NewNoteWorker do
let(:note) { create(:note) }
it "calls NotificationService#new_note" do
- expect_any_instance_of(NotificationService).to receive(:new_note).with(note)
+ expect_next_instance_of(NotificationService) do |service|
+ expect(service).to receive(:new_note).with(note)
+ end
described_class.new.perform(note.id)
end
it "calls Notes::PostProcessService#execute" do
- notes_post_process_service = double(Notes::PostProcessService)
- allow(Notes::PostProcessService).to receive(:new).with(note) { notes_post_process_service }
-
- expect(notes_post_process_service).to receive(:execute)
+ expect_next_instance_of(Notes::PostProcessService) do |service|
+ expect(service).to receive(:execute)
+ end
described_class.new.perform(note.id)
end
@@ -36,14 +37,14 @@ describe NewNoteWorker do
expect { described_class.new.perform(unexistent_note_id) }.not_to raise_error
end
- it "does not call NotificationService#new_note" do
- expect_any_instance_of(NotificationService).not_to receive(:new_note)
+ it "does not call NotificationService" do
+ expect(NotificationService).not_to receive(:new)
described_class.new.perform(unexistent_note_id)
end
- it "does not call Notes::PostProcessService#execute" do
- expect_any_instance_of(Notes::PostProcessService).not_to receive(:execute)
+ it "does not call Notes::PostProcessService" do
+ expect(Notes::PostProcessService).not_to receive(:new)
described_class.new.perform(unexistent_note_id)
end
diff --git a/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb b/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
index 08a3511f70b..10c23cbb6d4 100644
--- a/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
+++ b/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
@@ -13,7 +13,7 @@ describe PagesDomainSslRenewalCronWorker do
describe '#perform' do
let(:project) { create :project }
- let!(:domain) { create(:pages_domain, project: project) }
+ let!(:domain) { create(:pages_domain, project: project, auto_ssl_enabled: false) }
let!(:domain_with_enabled_auto_ssl) { create(:pages_domain, project: project, auto_ssl_enabled: true) }
let!(:domain_with_obtained_letsencrypt) do
create(:pages_domain, :letsencrypt, project: project, auto_ssl_enabled: true)
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
index 9326db34209..4926c14a6ab 100644
--- a/spec/workers/pipeline_schedule_worker_spec.rb
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -28,7 +28,7 @@ describe PipelineScheduleWorker do
context 'when there is a scheduled pipeline within next_run_at' do
shared_examples 'successful scheduling' do
- it 'creates a new pipeline' do
+ it 'creates a new pipeline', :sidekiq_might_not_need_inline do
expect { subject }.to change { project.ci_pipelines.count }.by(1)
expect(Ci::Pipeline.last).to be_schedule
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index eb1d3c364ac..99800135075 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -81,9 +81,10 @@ describe ProcessCommitWorker do
let(:commit) do
project.repository.create_branch('feature-merged', 'feature')
+ project.repository.after_create_branch
MergeRequests::MergeService
- .new(project, merge_request.author)
+ .new(project, merge_request.author, { sha: merge_request.diff_head_sha })
.execute(merge_request)
merge_request.reload.merge_commit
diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb
index 7f3c4881b89..fa02762d716 100644
--- a/spec/workers/project_cache_worker_spec.rb
+++ b/spec/workers/project_cache_worker_spec.rb
@@ -105,7 +105,7 @@ describe ProjectCacheWorker do
end
context 'when a lease could be obtained' do
- it 'updates the project statistics twice' do
+ it 'updates the project statistics twice', :sidekiq_might_not_need_inline do
stub_exclusive_lease(lease_key, timeout: lease_timeout)
expect(Projects::UpdateStatisticsService).to receive(:new)
diff --git a/spec/workers/remove_expired_group_links_worker_spec.rb b/spec/workers/remove_expired_group_links_worker_spec.rb
index 10d9aa37dee..9557aa3086c 100644
--- a/spec/workers/remove_expired_group_links_worker_spec.rb
+++ b/spec/workers/remove_expired_group_links_worker_spec.rb
@@ -4,23 +4,54 @@ require 'spec_helper'
describe RemoveExpiredGroupLinksWorker do
describe '#perform' do
- let!(:expired_project_group_link) { create(:project_group_link, expires_at: 1.hour.ago) }
- let!(:project_group_link_expiring_in_future) { create(:project_group_link, expires_at: 10.days.from_now) }
- let!(:non_expiring_project_group_link) { create(:project_group_link, expires_at: nil) }
+ context 'ProjectGroupLinks' do
+ let!(:expired_project_group_link) { create(:project_group_link, expires_at: 1.hour.ago) }
+ let!(:project_group_link_expiring_in_future) { create(:project_group_link, expires_at: 10.days.from_now) }
+ let!(:non_expiring_project_group_link) { create(:project_group_link, expires_at: nil) }
- it 'removes expired group links' do
- expect { subject.perform }.to change { ProjectGroupLink.count }.by(-1)
- expect(ProjectGroupLink.find_by(id: expired_project_group_link.id)).to be_nil
- end
+ it 'removes expired group links' do
+ expect { subject.perform }.to change { ProjectGroupLink.count }.by(-1)
+ expect(ProjectGroupLink.find_by(id: expired_project_group_link.id)).to be_nil
+ end
+
+ it 'leaves group links that expire in the future' do
+ subject.perform
+ expect(project_group_link_expiring_in_future.reload).to be_present
+ end
- it 'leaves group links that expire in the future' do
- subject.perform
- expect(project_group_link_expiring_in_future.reload).to be_present
+ it 'leaves group links that do not expire at all' do
+ subject.perform
+ expect(non_expiring_project_group_link.reload).to be_present
+ end
end
- it 'leaves group links that do not expire at all' do
- subject.perform
- expect(non_expiring_project_group_link.reload).to be_present
+ context 'GroupGroupLinks' do
+ let(:mock_destroy_service) { instance_double(Groups::GroupLinks::DestroyService) }
+
+ before do
+ allow(Groups::GroupLinks::DestroyService).to(
+ receive(:new).and_return(mock_destroy_service))
+ end
+
+ context 'expired GroupGroupLink exists' do
+ before do
+ create(:group_group_link, expires_at: 1.hour.ago)
+ end
+
+ it 'calls Groups::GroupLinks::DestroyService' do
+ expect(mock_destroy_service).to receive(:execute).once
+
+ subject.perform
+ end
+ end
+
+ context 'expired GroupGroupLink does not exist' do
+ it 'does not call Groups::GroupLinks::DestroyService' do
+ expect(mock_destroy_service).not_to receive(:execute)
+
+ subject.perform
+ end
+ end
end
end
end
diff --git a/spec/workers/repository_check/single_repository_worker_spec.rb b/spec/workers/repository_check/single_repository_worker_spec.rb
index 65e1c5e9d5d..6870e15424f 100644
--- a/spec/workers/repository_check/single_repository_worker_spec.rb
+++ b/spec/workers/repository_check/single_repository_worker_spec.rb
@@ -68,7 +68,7 @@ describe RepositoryCheck::SingleRepositoryWorker do
it 'creates missing wikis' do
project = create(:project, :wiki_enabled)
- Gitlab::Shell.new.rm_directory(project.repository_storage, project.wiki.path)
+ TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
subject.perform(project.id)
@@ -77,12 +77,12 @@ describe RepositoryCheck::SingleRepositoryWorker do
it 'does not create a wiki if the main repo does not exist at all' do
project = create(:project, :repository)
- Gitlab::Shell.new.rm_directory(project.repository_storage, project.path)
- Gitlab::Shell.new.rm_directory(project.repository_storage, project.wiki.path)
+ TestEnv.rm_storage_dir(project.repository_storage, project.path)
+ TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
subject.perform(project.id)
- expect(Gitlab::Shell.new.exists?(project.repository_storage, project.wiki.path)).to eq(false)
+ expect(TestEnv.storage_dir_exists?(project.repository_storage, project.wiki.path)).to eq(false)
end
def create_push_event(project)
diff --git a/spec/workers/stuck_ci_jobs_worker_spec.rb b/spec/workers/stuck_ci_jobs_worker_spec.rb
index c3d577e2dae..59707409b5a 100644
--- a/spec/workers/stuck_ci_jobs_worker_spec.rb
+++ b/spec/workers/stuck_ci_jobs_worker_spec.rb
@@ -18,15 +18,30 @@ describe StuckCiJobsWorker do
end
shared_examples 'job is dropped' do
- before do
+ it "changes status" do
worker.perform
job.reload
- end
- it "changes status" do
expect(job).to be_failed
expect(job).to be_stuck_or_timeout_failure
end
+
+ context 'when the job has a data integrity problem' do
+ it "drops the job and logs the reason" do
+ job.update_columns(yaml_variables: '[{"key" => "value"}]')
+
+ expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
+ .with(anything, a_hash_including(extra: a_hash_including(build_id: job.id)))
+ .once
+ .and_call_original
+
+ worker.perform
+ job.reload
+
+ expect(job).to be_failed
+ expect(job).to be_data_integrity_failure
+ end
+ end
end
shared_examples 'job is unchanged' do
diff --git a/spec/workers/stuck_merge_jobs_worker_spec.rb b/spec/workers/stuck_merge_jobs_worker_spec.rb
index 09efed6d2cf..8ceaf1fc555 100644
--- a/spec/workers/stuck_merge_jobs_worker_spec.rb
+++ b/spec/workers/stuck_merge_jobs_worker_spec.rb
@@ -22,7 +22,7 @@ describe StuckMergeJobsWorker do
expect(mr_without_sha.merge_jid).to be_nil
end
- it 'updates merge request to opened when locked but has not been merged' do
+ it 'updates merge request to opened when locked but has not been merged', :sidekiq_might_not_need_inline do
allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return(%w(123))
merge_request = create(:merge_request, :locked, merge_jid: '123', state: :locked)
pipeline = create(:ci_empty_pipeline, project: merge_request.project, ref: merge_request.source_branch, sha: merge_request.source_branch_sha)
diff --git a/spec/workers/wait_for_cluster_creation_worker_spec.rb b/spec/workers/wait_for_cluster_creation_worker_spec.rb
index 850eba263a7..b21a9b612af 100644
--- a/spec/workers/wait_for_cluster_creation_worker_spec.rb
+++ b/spec/workers/wait_for_cluster_creation_worker_spec.rb
@@ -8,8 +8,19 @@ describe WaitForClusterCreationWorker do
let(:cluster) { create(:cluster, provider_type: :gcp, provider_gcp: provider) }
let(:provider) { create(:cluster_provider_gcp, :creating) }
- it 'provision a cluster' do
- expect_any_instance_of(Clusters::Gcp::VerifyProvisionStatusService).to receive(:execute)
+ it 'provisions a cluster' do
+ expect_any_instance_of(Clusters::Gcp::VerifyProvisionStatusService).to receive(:execute).with(provider)
+
+ described_class.new.perform(cluster.id)
+ end
+ end
+
+ context 'when provider type is aws' do
+ let(:cluster) { create(:cluster, provider_type: :aws, provider_aws: provider) }
+ let(:provider) { create(:cluster_provider_aws, :creating) }
+
+ it 'provisions a cluster' do
+ expect_any_instance_of(Clusters::Aws::VerifyProvisionStatusService).to receive(:execute).with(provider)
described_class.new.perform(cluster.id)
end